Test Report: Hyper-V_Windows 20604

                    
                      18ead8fd12890e86b803b7c091eba160ddf37424:2025-04-08:39059
                    
                

Tests failed (38/193)

Order | Failed test | Duration (s)
49 TestCertExpiration 10800.408
58 TestErrorSpam/setup 198.44
83 TestFunctional/serial/ExtraConfig 284.39
84 TestFunctional/serial/ComponentHealth 180.66
87 TestFunctional/serial/InvalidService 4.24
93 TestFunctional/parallel/StatusCmd 249.31
97 TestFunctional/parallel/ServiceCmdConnect 181.19
99 TestFunctional/parallel/PersistentVolumeClaim 554.22
103 TestFunctional/parallel/MySQL 172.41
109 TestFunctional/parallel/NodeLabels 301.41
114 TestFunctional/parallel/ServiceCmd/DeployApp 2.17
116 TestFunctional/parallel/ServiceCmd/List 8.93
117 TestFunctional/parallel/ServiceCmd/JSONOutput 8.61
119 TestFunctional/parallel/ServiceCmd/HTTPS 8.62
120 TestFunctional/parallel/ServiceCmd/Format 8.7
122 TestFunctional/parallel/ServiceCmd/URL 8.61
124 TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel 7.71
127 TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup 4.23
135 TestFunctional/parallel/ImageCommands/ImageListShort 46.56
136 TestFunctional/parallel/ImageCommands/ImageListTable 46.86
137 TestFunctional/parallel/ImageCommands/ImageListJson 60.12
138 TestFunctional/parallel/ImageCommands/ImageListYaml 44.48
139 TestFunctional/parallel/ImageCommands/ImageBuild 120.39
141 TestFunctional/parallel/ImageCommands/ImageLoadDaemon 121.68
142 TestFunctional/parallel/ImageCommands/ImageReloadDaemon 120.5
143 TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon 120.44
144 TestFunctional/parallel/DockerEnv/powershell 471.35
145 TestFunctional/parallel/ImageCommands/ImageSaveToFile 60.22
150 TestFunctional/parallel/ImageCommands/ImageLoadFromFile 0.46
161 TestMultiControlPlane/serial/PingHostFromPods 70.15
166 TestMultiControlPlane/serial/StopSecondaryNode 48.85
220 TestMultiNode/serial/FreshStart2Nodes 488.02
221 TestMultiNode/serial/DeployApp2Nodes 750.63
222 TestMultiNode/serial/PingHostFrom2Pods 51.8
223 TestMultiNode/serial/AddNode 282.09
226 TestMultiNode/serial/CopyFile 72.94
227 TestMultiNode/serial/StopNode 123.88
254 TestNoKubernetes/serial/StartWithK8s 299.9
x
+
TestCertExpiration (10800.408s)

                                                
                                                
=== RUN   TestCertExpiration
=== PAUSE TestCertExpiration

                                                
                                                

                                                
                                                
=== CONT  TestCertExpiration
cert_options_test.go:123: (dbg) Run:  out/minikube-windows-amd64.exe start -p cert-expiration-681300 --memory=2048 --cert-expiration=3m --driver=hyperv
E0408 20:41:05.489631    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
cert_options_test.go:123: (dbg) Done: out/minikube-windows-amd64.exe start -p cert-expiration-681300 --memory=2048 --cert-expiration=3m --driver=hyperv: (6m40.0415661s)
panic: test timed out after 3h0m0s
	running tests:
		TestCertExpiration (7m43s)
		TestCertOptions (5m16s)
		TestDockerFlags (7m0s)
		TestNetworkPlugins (12m1s)
		TestRunningBinaryUpgrade (2m34s)

                                                
                                                
goroutine 2244 [running]:
testing.(*M).startAlarm.func1()
	/usr/local/go/src/testing/testing.go:2484 +0x394
created by time.goFunc
	/usr/local/go/src/time/sleep.go:215 +0x2d

                                                
                                                
goroutine 1 [chan receive, 8 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1753 +0x486
testing.tRunner(0xc000586700, 0xc000b5bbc8)
	/usr/local/go/src/testing/testing.go:1798 +0x104
testing.runTests(0xc00078e000, {0x52b72c0, 0x2b, 0x2b}, {0xffffffffffffffff?, 0xc0014520d0?, 0x52de640?})
	/usr/local/go/src/testing/testing.go:2277 +0x4b4
testing.(*M).Run(0xc00051c5a0)
	/usr/local/go/src/testing/testing.go:2142 +0x64a
k8s.io/minikube/test/integration.TestMain(0xc00051c5a0)
	/home/jenkins/workspace/Build_Cross/test/integration/main_test.go:62 +0x8b
main.main()
	_testmain.go:131 +0xa8

                                                
                                                
goroutine 159 [chan receive, 173 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0xc00080c580, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x289
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 157
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x569

                                                
                                                
goroutine 144 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3878810, 0xc0000781c0}, 0xc001465f50, 0xc001465f98)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xd1
k8s.io/apimachinery/pkg/util/wait.poll({0x3878810, 0xc0000781c0}, 0x90?, 0xc001465f50, 0xc001465f98)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x89
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3878810?, 0xc0000781c0?}, 0x0?, 0x0?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x45
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0xc001465fd0?, 0x39cc04?, 0xc00056a850?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x36
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 159
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x27a

                                                
                                                
goroutine 688 [sleep, 2 minutes]:
time.Sleep(0x29e8d60800)
	/usr/local/go/src/runtime/time.go:338 +0x167
k8s.io/minikube/test/integration.TestCertExpiration(0xc0019d4fc0)
	/home/jenkins/workspace/Build_Cross/test/integration/cert_options_test.go:129 +0x3c5
testing.tRunner(0xc0019d4fc0, 0x34df6c0)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 158 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3889620)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x345
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 157
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x245

                                                
                                                
goroutine 687 [syscall, 6 minutes]:
syscall.Syscall(0xc00008ba58?, 0x0?, 0x36043b?, 0x1000000000000?, 0x1e?)
	/usr/local/go/src/runtime/syscall_windows.go:457 +0x29
syscall.WaitForSingleObject(0x538, 0xffffffff)
	/usr/local/go/src/syscall/zsyscall_windows.go:1149 +0x5a
os.(*Process).wait(0xc001686180?)
	/usr/local/go/src/os/exec_windows.go:28 +0xe6
os.(*Process).Wait(...)
	/usr/local/go/src/os/exec.go:358
os/exec.(*Cmd).Wait(0xc001686180)
	/usr/local/go/src/os/exec/exec.go:922 +0x45
os/exec.(*Cmd).Run(0xc001686180)
	/usr/local/go/src/os/exec/exec.go:626 +0x2d
k8s.io/minikube/test/integration.Run(0xc0019d48c0, 0xc001686180)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:103 +0x1e5
k8s.io/minikube/test/integration.TestCertOptions(0xc0019d48c0)
	/home/jenkins/workspace/Build_Cross/test/integration/cert_options_test.go:49 +0x425
testing.tRunner(0xc0019d48c0, 0x34df6c8)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2099 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0019d5880)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0019d5880)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc0019d5880)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc0019d5880, 0xc000612380)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2104 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc000485a40)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc000485a40)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc000485a40)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc000485a40, 0xc000612680)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 143 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0xc00080c550, 0x3b)
	/usr/local/go/src/runtime/sema.go:597 +0x15d
sync.(*Cond).Wait(0xc001739d80?)
	/usr/local/go/src/sync/cond.go:71 +0x85
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x388c4e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x86
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0xc00080c580)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x44
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0xc000808008?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x33
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc000510020, {0x38396c0, 0xc0009585a0}, 0x1, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0xaf
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0xc000510020, 0x3b9aca00, 0x0, 0x1, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x7f
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 159
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x1cf

                                                
                                                
goroutine 1055 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.waitForWithContext({0x3878810, 0xc0000781c0}, 0xc0016a3f50, 0xc0016a3f98)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/wait.go:205 +0xd1
k8s.io/apimachinery/pkg/util/wait.poll({0x3878810, 0xc0000781c0}, 0x11?, 0xc0016a3f50, 0xc0016a3f98)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:260 +0x89
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext({0x3878810?, 0xc0000781c0?}, 0xc0019d5a40?, 0x3635c0?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:200 +0x45
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntil(0x364525?, 0xc0019d5a40?, 0xc0005d8940?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:187 +0x36
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 1030
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:145 +0x27a

                                                
                                                
goroutine 2160 [syscall]:
syscall.Syscall6(0x25ec7470a38?, 0x10000?, 0x4000?, 0xc000500008?, 0xc00096c000?, 0xc00147fbf0?, 0x278665?, 0xc00095fbe8?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x5fc, {0xc000976b77?, 0x5489, 0x2cdf1f?}, 0x10000?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc00164e248?, {0xc000976b77?, 0x0?, 0x0?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc00164e248, {0xc000976b77, 0x5489, 0x5489})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc000534128, {0xc000976b77?, 0x55b?, 0x55b?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc0017e2390, {0x3837c00, 0xc0000c6a90})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc0017e2390}, {0x3837c00, 0xc0000c6a90}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0xc00147fe90?, {0x3837d80, 0xc0017e2390})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0xc00147feb0?, {0x3837d80?, 0xc0017e2390?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc0017e2390}, {0x3837ce0, 0xc000534128}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0xc00144ed00?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 692
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                                
goroutine 145 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x19b
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 144
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xbb

                                                
                                                
goroutine 2228 [syscall, 2 minutes]:
syscall.Syscall6(0x25eecce2d70?, 0x25ec7470108?, 0x200?, 0xc000a58808?, 0xc000984200?, 0xc00095fbf0?, 0x278659?, 0x35?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x5e4, {0xc000984200?, 0x200, 0x0?}, 0x200?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc000b09688?, {0xc000984200?, 0x0?, 0x29501727700?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc000b09688, {0xc000984200, 0x200, 0x200})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc0001621d0, {0xc000984200?, 0x216d3f?, 0x2625520?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc00168b980, {0x3837c00, 0xc000534020})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc00168b980}, {0x3837c00, 0xc000534020}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0xc0018927b0?, {0x3837d80, 0xc00168b980})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0x0?, {0x3837d80?, 0xc00168b980?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc00168b980}, {0x3837ce0, 0xc0001621d0}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0xc00144ed00?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 2084
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                                
goroutine 2102 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0019d5dc0)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0019d5dc0)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc0019d5dc0)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc0019d5dc0, 0xc000612500)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2103 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc00184ca80)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc00184ca80)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc00184ca80)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc00184ca80, 0xc000612580)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 692 [syscall, 8 minutes]:
syscall.Syscall(0xc001937a28?, 0x0?, 0x36043b?, 0x1000000000000?, 0x1e?)
	/usr/local/go/src/runtime/syscall_windows.go:457 +0x29
syscall.WaitForSingleObject(0x734, 0xffffffff)
	/usr/local/go/src/syscall/zsyscall_windows.go:1149 +0x5a
os.(*Process).wait(0xc000a82f00?)
	/usr/local/go/src/os/exec_windows.go:28 +0xe6
os.(*Process).Wait(...)
	/usr/local/go/src/os/exec.go:358
os/exec.(*Cmd).Wait(0xc000a82f00)
	/usr/local/go/src/os/exec/exec.go:922 +0x45
os/exec.(*Cmd).Run(0xc000a82f00)
	/usr/local/go/src/os/exec/exec.go:626 +0x2d
k8s.io/minikube/test/integration.Run(0xc00184c000, 0xc000a82f00)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:103 +0x1e5
k8s.io/minikube/test/integration.TestDockerFlags(0xc00184c000)
	/home/jenkins/workspace/Build_Cross/test/integration/docker_test.go:51 +0x495
testing.tRunner(0xc00184c000, 0x34df6d8)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2086 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc001609340)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc001609340)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestKubernetesUpgrade(0xc001609340)
	/home/jenkins/workspace/Build_Cross/test/integration/version_upgrade_test.go:215 +0x39
testing.tRunner(0xc001609340, 0x34df778)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2105 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc000485c00)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc000485c00)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc000485c00)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc000485c00, 0xc000612780)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2100 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0019d5a40)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0019d5a40)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc0019d5a40)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc0019d5a40, 0xc000612400)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 719 [IO wait, 160 minutes]:
internal/poll.runtime_pollWait(0x25eeccdecb8, 0x72)
	/usr/local/go/src/runtime/netpoll.go:351 +0x85
internal/poll.(*pollDesc).wait(0x2ccbb3?, 0x0?, 0x0)
	/usr/local/go/src/internal/poll/fd_poll_runtime.go:84 +0x27
internal/poll.execIO(0xc00017a2a0, 0xc000967ba0)
	/usr/local/go/src/internal/poll/fd_windows.go:177 +0x105
internal/poll.(*FD).acceptOne(0xc00017a288, 0x394, {0xc000246780?, 0xc000967c00?, 0x2d72e5?}, 0xc000967c34?)
	/usr/local/go/src/internal/poll/fd_windows.go:946 +0x65
internal/poll.(*FD).Accept(0xc00017a288, 0xc000967d80)
	/usr/local/go/src/internal/poll/fd_windows.go:980 +0x1b6
net.(*netFD).accept(0xc00017a288)
	/usr/local/go/src/net/fd_windows.go:182 +0x4b
net.(*TCPListener).accept(0xc000b621c0)
	/usr/local/go/src/net/tcpsock_posix.go:159 +0x1b
net.(*TCPListener).Accept(0xc000b621c0)
	/usr/local/go/src/net/tcpsock.go:380 +0x30
net/http.(*Server).Serve(0xc000b7a000, {0x38671c0, 0xc000b621c0})
	/usr/local/go/src/net/http/server.go:3424 +0x30c
net/http.(*Server).ListenAndServe(0xc000b7a000)
	/usr/local/go/src/net/http/server.go:3350 +0x71
k8s.io/minikube/test/integration.startHTTPProxy.func1(...)
	/home/jenkins/workspace/Build_Cross/test/integration/functional_test.go:2230
created by k8s.io/minikube/test/integration.startHTTPProxy in goroutine 716
	/home/jenkins/workspace/Build_Cross/test/integration/functional_test.go:2229 +0x129

                                                
                                                
goroutine 2101 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0019d5c00)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0019d5c00)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc0019d5c00)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc0019d5c00, 0xc000612480)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2085 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc001609180)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc001609180)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestStoppedBinaryUpgrade(0xc001609180)
	/home/jenkins/workspace/Build_Cross/test/integration/version_upgrade_test.go:143 +0x85
testing.tRunner(0xc001609180, 0x34df800)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2082 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc001608a80)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc001608a80)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestStartStop(0xc001608a80)
	/home/jenkins/workspace/Build_Cross/test/integration/start_stop_delete_test.go:44 +0x18
testing.tRunner(0xc001608a80, 0x34df7f8)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2106 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc000485dc0)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc000485dc0)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc000485dc0)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc000485dc0, 0xc000612800)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 1083 [chan send, 138 minutes]:
os/exec.(*Cmd).watchCtx(0xc001784c00, 0xc0017b4770)
	/usr/local/go/src/os/exec/exec.go:814 +0x3e5
created by os/exec.(*Cmd).Start in goroutine 919
	/usr/local/go/src/os/exec/exec.go:775 +0x989

                                                
                                                
goroutine 1030 [chan receive, 138 minutes]:
k8s.io/client-go/transport.(*dynamicClientCert).Run(0xc0005d9b40, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:150 +0x289
created by k8s.io/client-go/transport.(*tlsTransportCache).get in goroutine 928
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cache.go:122 +0x569

                                                
                                                
goroutine 2049 [chan receive, 12 minutes]:
testing.tRunner.func1()
	/usr/local/go/src/testing/testing.go:1753 +0x486
testing.tRunner(0xc000607180, 0xc0009b4918)
	/usr/local/go/src/testing/testing.go:1798 +0x104
created by testing.(*T).Run in goroutine 1990
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2084 [syscall, 2 minutes]:
syscall.Syscall(0xc00193b7f0?, 0x0?, 0x36043b?, 0x1000000000000?, 0x4d?)
	/usr/local/go/src/runtime/syscall_windows.go:457 +0x29
syscall.WaitForSingleObject(0x354, 0xffffffff)
	/usr/local/go/src/syscall/zsyscall_windows.go:1149 +0x5a
os.(*Process).wait(0xc001686600?)
	/usr/local/go/src/os/exec_windows.go:28 +0xe6
os.(*Process).Wait(...)
	/usr/local/go/src/os/exec.go:358
os/exec.(*Cmd).Wait(0xc001686600)
	/usr/local/go/src/os/exec/exec.go:922 +0x45
os/exec.(*Cmd).Run(0xc001686600)
	/usr/local/go/src/os/exec/exec.go:626 +0x2d
k8s.io/minikube/test/integration.Run(0xc001608e00, 0xc001686600)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:103 +0x1e5
k8s.io/minikube/test/integration.TestRunningBinaryUpgrade.func1()
	/home/jenkins/workspace/Build_Cross/test/integration/version_upgrade_test.go:120 +0x36d
github.com/cenkalti/backoff/v4.RetryNotifyWithTimer.Operation.withEmptyData.func1()
	/home/jenkins/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.3.0/retry.go:18 +0x13
github.com/cenkalti/backoff/v4.doRetryNotify[...](0xc00156dc38?, {0x385a098, 0xc000806440}, 0x34e09e0, {0x0, 0x0?})
	/home/jenkins/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.3.0/retry.go:88 +0x11c
github.com/cenkalti/backoff/v4.RetryNotifyWithTimer(0x0?, {0x385a098?, 0xc000806440?}, 0x40?, {0x0?, 0x0?})
	/home/jenkins/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.3.0/retry.go:61 +0x56
github.com/cenkalti/backoff/v4.RetryNotify(...)
	/home/jenkins/go/pkg/mod/github.com/cenkalti/backoff/v4@v4.3.0/retry.go:49
k8s.io/minikube/pkg/util/retry.Expo(0xc00193be08, 0x3b9aca00, 0x1a3185c5000, {0xc00193bd10?, 0x24604a0?, 0x5170720?})
	/home/jenkins/workspace/Build_Cross/pkg/util/retry/retry.go:60 +0xe5
k8s.io/minikube/test/integration.TestRunningBinaryUpgrade(0xc001608e00)
	/home/jenkins/workspace/Build_Cross/test/integration/version_upgrade_test.go:125 +0x4a5
testing.tRunner(0xc001608e00, 0x34df7d8)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 1029 [select]:
k8s.io/client-go/util/workqueue.(*delayingType[...]).waitingLoop(0x3889620)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:311 +0x345
created by k8s.io/client-go/util/workqueue.newDelayingQueue[...] in goroutine 928
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/delaying_queue.go:148 +0x245

                                                
                                                
goroutine 1990 [chan receive, 12 minutes]:
testing.(*T).Run(0xc000b78a80, {0x2b44d55?, 0xc002175f60?}, 0xc0009b4918)
	/usr/local/go/src/testing/testing.go:1859 +0x414
k8s.io/minikube/test/integration.TestNetworkPlugins(0xc000b78a80)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:52 +0xd3
testing.tRunner(0xc000b78a80, 0x34df7b0)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 1054 [sync.Cond.Wait, 2 minutes]:
sync.runtime_notifyListWait(0xc0005d9b10, 0x32)
	/usr/local/go/src/runtime/sema.go:597 +0x15d
sync.(*Cond).Wait(0xc00156fd80?)
	/usr/local/go/src/sync/cond.go:71 +0x85
k8s.io/client-go/util/workqueue.(*Typed[...]).Get(0x388c4e0)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/util/workqueue/queue.go:277 +0x86
k8s.io/client-go/transport.(*dynamicClientCert).processNextWorkItem(0xc0005d9b40)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:159 +0x44
k8s.io/client-go/transport.(*dynamicClientCert).runWorker(...)
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:154
k8s.io/apimachinery/pkg/util/wait.BackoffUntil.func1(0xc0000d8008?)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:226 +0x33
k8s.io/apimachinery/pkg/util/wait.BackoffUntil(0xc0017d9c90, {0x38396c0, 0xc0013bec60}, 0x1, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:227 +0xaf
k8s.io/apimachinery/pkg/util/wait.JitterUntil(0xc0017d9c90, 0x3b9aca00, 0x0, 0x1, 0xc0000781c0)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:204 +0x7f
k8s.io/apimachinery/pkg/util/wait.Until(...)
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/backoff.go:161
created by k8s.io/client-go/transport.(*dynamicClientCert).Run in goroutine 1030
	/home/jenkins/go/pkg/mod/k8s.io/client-go@v0.32.2/transport/cert_rotation.go:143 +0x1cf

                                                
                                                
goroutine 2098 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0019d56c0)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0019d56c0)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestNetworkPlugins.func1.1(0xc0019d56c0)
	/home/jenkins/workspace/Build_Cross/test/integration/net_test.go:106 +0x317
testing.tRunner(0xc0019d56c0, 0xc000612300)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 2049
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 1056 [select, 2 minutes]:
k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1.1()
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:297 +0x19b
created by k8s.io/apimachinery/pkg/util/wait.PollImmediateUntilWithContext.poller.func1 in goroutine 1055
	/home/jenkins/go/pkg/mod/k8s.io/apimachinery@v0.32.2/pkg/util/wait/poll.go:280 +0xbb

                                                
                                                
goroutine 1992 [chan receive, 12 minutes]:
testing.(*testState).waitParallel(0xc000802050)
	/usr/local/go/src/testing/testing.go:1926 +0xaf
testing.(*T).Parallel(0xc0017e4c40)
	/usr/local/go/src/testing/testing.go:1578 +0x225
k8s.io/minikube/test/integration.MaybeParallel(0xc0017e4c40)
	/home/jenkins/workspace/Build_Cross/test/integration/helpers_test.go:483 +0x34
k8s.io/minikube/test/integration.TestPause(0xc0017e4c40)
	/home/jenkins/workspace/Build_Cross/test/integration/pause_test.go:33 +0x2b
testing.tRunner(0xc0017e4c40, 0x34df7c8)
	/usr/local/go/src/testing/testing.go:1792 +0xcb
created by testing.(*T).Run in goroutine 1
	/usr/local/go/src/testing/testing.go:1851 +0x3f6

                                                
                                                
goroutine 2159 [syscall, 2 minutes]:
syscall.Syscall6(0x25eece64b98?, 0x25ec7470108?, 0x800?, 0xc000808008?, 0xc00094b800?, 0xc0009f1bf0?, 0x278659?, 0xc0017824e0?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x2c8, {0xc00094ba3d?, 0x5c3, 0x2cdf1f?}, 0x800?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc0018f5d48?, {0xc00094ba3d?, 0x0?, 0xc0009f1cb0?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc0018f5d48, {0xc00094ba3d, 0x5c3, 0x5c3})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc000534110, {0xc00094ba3d?, 0x216d3f?, 0x2625520?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc0017e2360, {0x3837c00, 0xc0001621e8})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc0017e2360}, {0x3837c00, 0xc0001621e8}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x0?, {0x3837d80, 0xc0017e2360})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0x31c12f?, {0x3837d80?, 0xc0017e2360?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc0017e2360}, {0x3837ce0, 0xc000534110}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0x34df7b8?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 692
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                                
goroutine 2190 [syscall, 6 minutes]:
syscall.Syscall6(0x25eecb71d68?, 0x25ec7470108?, 0x200?, 0xc00006b808?, 0xc0001cc200?, 0xc0014a7bf0?, 0x278659?, 0xc0017e48c0?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x568, {0xc0001cc200?, 0x200, 0x2cdf1f?}, 0x200?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc000b08fc8?, {0xc0001cc200?, 0x0?, 0xc0014a7ce0?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc000b08fc8, {0xc0001cc200, 0x200, 0x200})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc000b0c0e0, {0xc0001cc200?, 0x216d3f?, 0x2625520?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc000842870, {0x3837c00, 0xc000b0c0f8})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc000842870}, {0x3837c00, 0xc000b0c0f8}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x0?, {0x3837d80, 0xc000842870})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0xc0014a7eb0?, {0x3837d80?, 0xc000842870?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc000842870}, {0x3837ce0, 0xc000b0c0e0}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0xc000038360?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 687
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                                
goroutine 2191 [select, 6 minutes]:
os/exec.(*Cmd).watchCtx(0xc001686180, 0xc0009521c0)
	/usr/local/go/src/os/exec/exec.go:789 +0xb2
created by os/exec.(*Cmd).Start in goroutine 687
	/usr/local/go/src/os/exec/exec.go:775 +0x989

                                                
                                                
goroutine 2229 [select, 2 minutes]:
os/exec.(*Cmd).watchCtx(0xc001686600, 0xc00188a620)
	/usr/local/go/src/os/exec/exec.go:789 +0xb2
created by os/exec.(*Cmd).Start in goroutine 2084
	/usr/local/go/src/os/exec/exec.go:775 +0x989

                                                
                                                
goroutine 2161 [select, 8 minutes]:
os/exec.(*Cmd).watchCtx(0xc000a82f00, 0xc0017b45b0)
	/usr/local/go/src/os/exec/exec.go:789 +0xb2
created by os/exec.(*Cmd).Start in goroutine 692
	/usr/local/go/src/os/exec/exec.go:775 +0x989

                                                
                                                
goroutine 2227 [syscall, 2 minutes]:
syscall.Syscall6(0x25eeccfed58?, 0x25ec74705a0?, 0x800?, 0xc000601008?, 0xc001786000?, 0xc0014d3bf0?, 0x278659?, 0x2405720?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x3cc, {0xc001786309?, 0x4f7, 0x2cdf1f?}, 0x800?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc000b09208?, {0xc001786309?, 0x0?, 0xc000096508?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc000b09208, {0xc001786309, 0x4f7, 0x4f7})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc000162198, {0xc001786309?, 0x216d3f?, 0x2625520?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc00168b950, {0x3837c00, 0xc000b0c098})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc00168b950}, {0x3837c00, 0xc000b0c098}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x5b43c0?, {0x3837d80, 0xc00168b950})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0xc0014d3eb0?, {0x3837d80?, 0xc00168b950?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc00168b950}, {0x3837ce0, 0xc000162198}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0x0?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 2084
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                                
goroutine 2189 [syscall, 6 minutes]:
syscall.Syscall6(0x25eece644b8?, 0x25ec7470108?, 0x400?, 0xc000100008?, 0xc001500800?, 0xc001abdbf0?, 0x278659?, 0x21a705?)
	/usr/local/go/src/runtime/syscall_windows.go:463 +0x38
syscall.readFile(0x6ec, {0xc0015009fe?, 0x202, 0x2cdf1f?}, 0x400?, 0x800000?)
	/usr/local/go/src/syscall/zsyscall_windows.go:1020 +0x8b
syscall.ReadFile(...)
	/usr/local/go/src/syscall/syscall_windows.go:451
syscall.Read(0xc000b08b48?, {0xc0015009fe?, 0x0?, 0x41de383?})
	/usr/local/go/src/syscall/syscall_windows.go:430 +0x2d
internal/poll.(*FD).Read(0xc000b08b48, {0xc0015009fe, 0x202, 0x202})
	/usr/local/go/src/internal/poll/fd_windows.go:424 +0x1b5
os.(*File).read(...)
	/usr/local/go/src/os/file_posix.go:29
os.(*File).Read(0xc000b0c0c8, {0xc0015009fe?, 0x216d3f?, 0x2625520?})
	/usr/local/go/src/os/file.go:124 +0x4f
bytes.(*Buffer).ReadFrom(0xc0008424b0, {0x3837c00, 0xc00098e0c0})
	/usr/local/go/src/bytes/buffer.go:211 +0x98
io.copyBuffer({0x3837d80, 0xc0008424b0}, {0x3837c00, 0xc00098e0c0}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:415 +0x151
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os.genericWriteTo(0x5170720?, {0x3837d80, 0xc0008424b0})
	/usr/local/go/src/os/file.go:275 +0x4f
os.(*File).WriteTo(0xc001abdf38?, {0x3837d80?, 0xc0008424b0?})
	/usr/local/go/src/os/file.go:253 +0x49
io.copyBuffer({0x3837d80, 0xc0008424b0}, {0x3837ce0, 0xc000b0c0c8}, {0x0, 0x0, 0x0})
	/usr/local/go/src/io/io.go:411 +0x9d
io.Copy(...)
	/usr/local/go/src/io/io.go:388
os/exec.(*Cmd).writerDescriptor.func1()
	/usr/local/go/src/os/exec/exec.go:596 +0x34
os/exec.(*Cmd).Start.func2(0xc0017b4000?)
	/usr/local/go/src/os/exec/exec.go:749 +0x2c
created by os/exec.(*Cmd).Start in goroutine 687
	/usr/local/go/src/os/exec/exec.go:748 +0x9c5

                                                
                                    
x
+
TestErrorSpam/setup (198.44s)

                                                
                                                
=== RUN   TestErrorSpam/setup
error_spam_test.go:81: (dbg) Run:  out/minikube-windows-amd64.exe start -p nospam-514800 -n=1 --memory=2250 --wait=false --log_dir=C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 --driver=hyperv
error_spam_test.go:81: (dbg) Done: out/minikube-windows-amd64.exe start -p nospam-514800 -n=1 --memory=2250 --wait=false --log_dir=C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 --driver=hyperv: (3m18.4390466s)
error_spam_test.go:96: unexpected stderr: "! Failing to connect to https://registry.k8s.io/ from inside the minikube VM"
error_spam_test.go:96: unexpected stderr: "* To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/"
error_spam_test.go:110: minikube stdout:
* [nospam-514800] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
- KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
- MINIKUBE_FORCE_SYSTEMD=
- MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
- MINIKUBE_LOCATION=20604
- MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
* Using the hyperv driver based on user configuration
* Starting "nospam-514800" primary control-plane node in "nospam-514800" cluster
* Creating hyperv VM (CPUs=2, Memory=2250MB, Disk=20000MB) ...
* Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
- Generating certificates and keys ...
- Booting up control plane ...
- Configuring RBAC rules ...
* Configuring bridge CNI (Container Networking Interface) ...
* Verifying Kubernetes components...
- Using image gcr.io/k8s-minikube/storage-provisioner:v5
* Enabled addons: storage-provisioner, default-storageclass
* Done! kubectl is now configured to use "nospam-514800" cluster and "default" namespace by default
error_spam_test.go:111: minikube stderr:
! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
* To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
--- FAIL: TestErrorSpam/setup (198.44s)

                                                
                                    
x
+
TestFunctional/serial/ExtraConfig (284.39s)

                                                
                                                
=== RUN   TestFunctional/serial/ExtraConfig
functional_test.go:774: (dbg) Run:  out/minikube-windows-amd64.exe start -p functional-873100 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all
E0408 18:16:52.218683    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
functional_test.go:774: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p functional-873100 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all: exit status 90 (2m31.1070271s)

                                                
                                                
-- stdout --
	* [functional-873100] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	* Using the hyperv driver based on existing profile
	* Starting "functional-873100" primary control-plane node in "functional-873100" cluster
	* Updating the running hyperv "functional-873100" VM ...
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	* To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 18:11:02 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.285816228Z" level=info msg="Starting up"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.289021896Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.290101719Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=670
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.322435808Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347849049Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347887650Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347962351Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347977152Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348052953Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348147555Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348453962Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348542564Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348563964Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348576164Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348740168Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348978073Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352161941Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352329444Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352574050Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352739853Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352975458Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.353142862Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395578165Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395728468Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395895572Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395970374Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396127877Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396275880Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396864493Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397074097Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397170899Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397192300Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397240201Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397259601Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397430305Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397477306Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397539007Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397562207Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397577108Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397589608Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397664010Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397683910Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397706711Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397725411Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397755912Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397770712Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397798113Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397810513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397829513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397844513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397863114Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397875814Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397889314Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397907315Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397966416Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397981016Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397993517Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398058618Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398101519Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398132320Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398145220Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398155620Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398173220Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398185821Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398570329Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398732232Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398856035Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398896236Z" level=info msg="containerd successfully booted in 0.077821s"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.376333182Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.410435463Z" level=info msg="Loading containers: start."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.571607280Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.807909023Z" level=info msg="Loading containers: done."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830489939Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830838545Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830930947Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.831128451Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935123166Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935406471Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:03 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:35 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.010027367Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011810864Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011907464Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012093263Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012823862Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:36 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:36 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:36 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.061851643Z" level=info msg="Starting up"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.062875641Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.064586139Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1095
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.097709788Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124314947Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124473046Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124594646Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124721546Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124760546Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124775346Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125024645Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125117445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125145445Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125157745Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125186045Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125439445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128115041Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128211241Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130570137Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130668237Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130705437Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130727137Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130983936Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131041436Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131060936Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131082736Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131098536Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131146736Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131650035Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131810435Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131851435Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131867735Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131882735Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131904335Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131919335Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131933935Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131953335Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131967935Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131980635Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131992735Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132013135Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132027435Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132040035Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132052835Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132065735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132078335Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132090735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132103535Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132117134Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132134734Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132147034Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132158934Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132170634Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132186134Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132206534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132321534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132344034Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132462234Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132554934Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132573434Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132587634Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132597834Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132611534Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132622034Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132844333Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132919533Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132985233Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.133024633Z" level=info msg="containerd successfully booted in 0.036423s"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.119414410Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.141280877Z" level=info msg="Loading containers: start."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.279212864Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.402609773Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.519783192Z" level=info msg="Loading containers: done."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541830658Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541960058Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:37 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.581977096Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.582153396Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:47 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.193154760Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195095757Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195718756Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195932156Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.196103555Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:48 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:48 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:48 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.248289531Z" level=info msg="Starting up"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.249171830Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.250091828Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1457
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.280748981Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306283342Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306317242Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306364141Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306378641Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306400741Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306429041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306579241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306704441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306725241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306738841Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306794041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306916441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309432637Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309562037Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309828436Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309934036Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309963936Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309986136Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310391735Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310530535Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310596235Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310629135Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310692935Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310788435Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311178234Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311419334Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311575233Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311616433Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311728833Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311753533Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311769133Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311800133Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311818133Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311833133Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311862633Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311876433Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311915033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311929233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311944433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311958033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311970933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312000233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312012233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312024733Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312053133Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312066933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312078833Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312090433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312101633Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312117333Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312137933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312150433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312162833Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312220532Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312404232Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312550932Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312587832Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312598432Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312661132Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312681232Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313231631Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313512930Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313641230Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313684230Z" level=info msg="containerd successfully booted in 0.034228s"
	Apr 08 18:11:49 functional-873100 dockerd[1451]: time="2025-04-08T18:11:49.296381813Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.302038774Z" level=info msg="Loading containers: start."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.431387574Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.542525702Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.658559423Z" level=info msg="Loading containers: done."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689372876Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689573575Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728601715Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728727315Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:52 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.816831395Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817023212Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817060615Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817526956Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840205332Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840399149Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840418851Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840593166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904521638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904962276Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905346110Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905750645Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947425577Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947659497Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947910619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.948982913Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273517988Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273663500Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273923621Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.274384159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386027070Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386105976Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386123978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386316893Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491472175Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491940813Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.492151831Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.495654317Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503416550Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503579963Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504120507Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504549242Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702219637Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702344541Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702359842Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.708973385Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972305473Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972723788Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972915195Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.974488853Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.122409705Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124796787Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124979393Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.127466878Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224670622Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224748824Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224794626Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224931330Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.963794103Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965137848Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965192850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965355156Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022501487Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022595286Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022617086Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.023328883Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.794896489Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795055488Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795071088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795904285Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083642645Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083725345Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083740345Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.084285643Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.190627079Z" level=info msg="ignoring event" container=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191449376Z" level=info msg="shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191544976Z" level=warning msg="cleaning up after shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191554576Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.375599152Z" level=info msg="shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.375617052Z" level=info msg="ignoring event" container=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376550948Z" level=warning msg="cleaning up after shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376624948Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:12 functional-873100 dockerd[1451]: time="2025-04-08T18:14:12.779941913Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:14:12 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.049546353Z" level=info msg="ignoring event" container=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.053619844Z" level=info msg="shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056433738Z" level=warning msg="cleaning up after shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056714338Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.057422336Z" level=info msg="ignoring event" container=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058557534Z" level=info msg="shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058756533Z" level=warning msg="cleaning up after shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058914733Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080404288Z" level=info msg="shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.080552188Z" level=info msg="ignoring event" container=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080578588Z" level=warning msg="cleaning up after shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080688988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.091796765Z" level=info msg="shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092127964Z" level=warning msg="cleaning up after shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.092400564Z" level=info msg="ignoring event" container=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092802263Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.094192260Z" level=info msg="ignoring event" container=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.095204558Z" level=info msg="shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.096325355Z" level=warning msg="cleaning up after shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.097089854Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.107881831Z" level=info msg="ignoring event" container=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.108023331Z" level=info msg="ignoring event" container=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.107284333Z" level=info msg="shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109336728Z" level=warning msg="cleaning up after shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109635628Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123141600Z" level=info msg="shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123231799Z" level=warning msg="cleaning up after shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123281399Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.140994763Z" level=info msg="ignoring event" container=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141039262Z" level=info msg="ignoring event" container=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141064662Z" level=info msg="ignoring event" container=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.154970933Z" level=info msg="shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158618226Z" level=warning msg="cleaning up after shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160237223Z" level=info msg="ignoring event" container=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160291122Z" level=info msg="ignoring event" container=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159160825Z" level=info msg="shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161423220Z" level=warning msg="cleaning up after shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159002925Z" level=info msg="shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184316372Z" level=warning msg="cleaning up after shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184601572Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.181909277Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161173321Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.156581930Z" level=info msg="shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200229939Z" level=warning msg="cleaning up after shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200392739Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158867925Z" level=info msg="shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207020825Z" level=warning msg="cleaning up after shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207082925Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1451]: time="2025-04-08T18:14:18.018818225Z" level=info msg="ignoring event" container=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019424024Z" level=info msg="shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019904223Z" level=warning msg="cleaning up after shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.020413522Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.902208957Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.948884062Z" level=info msg="ignoring event" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949176064Z" level=info msg="shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949669067Z" level=warning msg="cleaning up after shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949710067Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011786167Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011902768Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011929568Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.012461671Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:14:24 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Consumed 5.669s CPU time.
	Apr 08 18:14:24 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.068343469Z" level=info msg="Starting up"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.069677076Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.070729582Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=4365
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.100696746Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126107485Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126196286Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126232186Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126245486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126268786Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126279486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126454287Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126574788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126593088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126603888Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126634788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126741089Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129837506Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129932306Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130405209Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130549410Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130614310Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130636510Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130875412Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130941212Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130957112Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131033513Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131058113Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131119613Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131318314Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131609316Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131702516Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131719916Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131730916Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131741816Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131752716Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131764017Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131775117Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131793717Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131805917Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131816117Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131833817Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131846517Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131857117Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131868417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131878417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131889417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131905217Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131917717Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131928417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131940317Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131951018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131961018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131970918Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132090118Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132112418Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132124718Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132134419Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132185119Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132206419Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132217619Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132228719Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132237919Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132248619Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132258319Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132570921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132663921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132859123Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132897823Z" level=info msg="containerd successfully booted in 0.033088s"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.116284488Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.164259928Z" level=info msg="Loading containers: start."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.397701599Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.522205623Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.639097209Z" level=info msg="Loading containers: done."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663639532Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663772733Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.702622627Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.703771933Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:14:25 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.332794698Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.336431912Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339499324Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339700424Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.340027126Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346149349Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346777551Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.348561258Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570602894Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570863695Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570947695Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.571185596Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658587125Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658827926Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658936527Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.659228328Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.703761195Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704154597Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704321797Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704758199Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735296714Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735429615Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735452515Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735781416Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.870831224Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871015625Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871100025Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871268526Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041786953Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041857554Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041871054Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.043550159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.292833507Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293100408Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293297408Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.294164111Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747736954Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747874254Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747890754Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.759064792Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783055174Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783139474Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783155474Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783261874Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.801445836Z" level=info msg="ignoring event" container=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802733841Z" level=info msg="shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802966241Z" level=warning msg="cleaning up after shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.803082242Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.809900565Z" level=info msg="ignoring event" container=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810781768Z" level=info msg="shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810912668Z" level=warning msg="cleaning up after shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.811062969Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.825885519Z" level=info msg="ignoring event" container=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.827939026Z" level=info msg="ignoring event" container=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.828877629Z" level=info msg="shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829460931Z" level=warning msg="cleaning up after shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829749532Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829121630Z" level=info msg="shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.831057437Z" level=warning msg="cleaning up after shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.885542622Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.886211624Z" level=info msg="shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887807230Z" level=warning msg="cleaning up after shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887863730Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.907464597Z" level=info msg="ignoring event" container=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.946953731Z" level=info msg="shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947152032Z" level=warning msg="cleaning up after shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947260432Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.958927872Z" level=info msg="shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.959093972Z" level=warning msg="cleaning up after shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.975088427Z" level=info msg="ignoring event" container=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.976303131Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.979795943Z" level=info msg="ignoring event" container=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.974915726Z" level=info msg="shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988113471Z" level=warning msg="cleaning up after shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988215471Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.045577851Z" level=info msg="shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.046177453Z" level=warning msg="cleaning up after shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.057572988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.133079619Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to remove runc container\" error=\"runc did not terminate successfully: exit status 255: \" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.166032719Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.167119423Z" level=error msg="copy shim log" error="read /proc/self/fd/48: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175573749Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175584449Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.203653735Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.204855538Z" level=error msg="copy shim log" error="read /proc/self/fd/50: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208656650Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208769450Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.341966357Z" level=error msg="collecting stats for container /k8s_storage-provisioner_storage-provisioner_kube-system_4be23f5b-6dcb-49ce-ac59-cfc1301fdbee_1: invalid id: "
	Apr 08 18:14:30 functional-873100 dockerd[4359]: 2025/04/08 18:14:30 http: superfluous response.WriteHeader call from go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp.(*respWriterWrapper).WriteHeader (wrap.go:98)
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.541261167Z" level=info msg="ignoring event" container=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542776071Z" level=info msg="shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542827172Z" level=warning msg="cleaning up after shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542837372Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.566194643Z" level=info msg="ignoring event" container=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566032643Z" level=info msg="shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566907745Z" level=warning msg="cleaning up after shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.567603047Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835197266Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835287766Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835327966Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835459166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911686499Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911805000Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911955900Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.912386202Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101356747Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101551148Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101662048Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101882249Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415314838Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415450438Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415603738Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415976039Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474268465Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474348065Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474365065Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474507465Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.482192682Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483364584Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483571085Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.484600587Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526458677Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526763178Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526813978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526935978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133647850Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133723750Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133738750Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133835850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.286971612Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.287313112Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.290652915Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.294746619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366454090Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366574390Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366587390Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366700890Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396505520Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396902520Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.397144620Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.399433222Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727525851Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727736952Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727833252Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.728181052Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.850915184Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852051185Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852067185Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852166785Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.064848638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065394739Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065518940Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.066853044Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:17:50 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.365807392Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568261688Z" level=info msg="shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568876587Z" level=warning msg="cleaning up after shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568997887Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.589148847Z" level=info msg="ignoring event" container=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623443078Z" level=info msg="shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623584378Z" level=warning msg="cleaning up after shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623887877Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.625252675Z" level=info msg="ignoring event" container=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.627785270Z" level=info msg="ignoring event" container=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627830469Z" level=info msg="shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627921669Z" level=warning msg="cleaning up after shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627934569Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.642668740Z" level=info msg="ignoring event" container=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644475036Z" level=info msg="shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644564436Z" level=warning msg="cleaning up after shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644577436Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.683973658Z" level=info msg="ignoring event" container=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686629852Z" level=info msg="shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686863652Z" level=warning msg="cleaning up after shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686935352Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.689544746Z" level=info msg="ignoring event" container=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691461743Z" level=info msg="shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691757842Z" level=warning msg="cleaning up after shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691850042Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698043329Z" level=info msg="shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698224529Z" level=warning msg="cleaning up after shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.699562326Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701518623Z" level=info msg="shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701583722Z" level=warning msg="cleaning up after shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701597022Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.704846016Z" level=info msg="shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706195213Z" level=info msg="ignoring event" container=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706384813Z" level=info msg="ignoring event" container=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706434613Z" level=info msg="ignoring event" container=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706453213Z" level=info msg="ignoring event" container=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708810608Z" level=warning msg="cleaning up after shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708891008Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.706709612Z" level=info msg="shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711050604Z" level=warning msg="cleaning up after shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711111803Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723156879Z" level=info msg="ignoring event" container=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723218679Z" level=info msg="ignoring event" container=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728637968Z" level=info msg="shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728820868Z" level=warning msg="cleaning up after shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728838268Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739136148Z" level=info msg="shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739219747Z" level=warning msg="cleaning up after shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739233947Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4359]: time="2025-04-08T18:17:55.602976948Z" level=info msg="ignoring event" container=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605413243Z" level=info msg="shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605936142Z" level=warning msg="cleaning up after shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.606055942Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.606652814Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.663136604Z" level=info msg="ignoring event" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663487579Z" level=info msg="shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663555775Z" level=warning msg="cleaning up after shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663584972Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733154233Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733206530Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733487510Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733578803Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:18:01 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Consumed 9.706s CPU time.
	Apr 08 18:18:01 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:18:01 functional-873100 dockerd[8415]: time="2025-04-08T18:18:01.793220155Z" level=info msg="Starting up"
	Apr 08 18:19:01 functional-873100 dockerd[8415]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 18:19:01 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	* 
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯

                                                
                                                
** /stderr **
functional_test.go:776: failed to restart minikube. args "out/minikube-windows-amd64.exe start -p functional-873100 --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision --wait=all": exit status 90
functional_test.go:778: restart took 2m31.2842241s for "functional-873100" cluster.
I0408 18:19:02.155776    7904 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.34705s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/serial/ExtraConfig FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/ExtraConfig]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (1m48.1600728s)
helpers_test.go:252: TestFunctional/serial/ExtraConfig logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| delete  | -p nospam-514800                                                         | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:09 UTC |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:09 UTC | 08 Apr 25 18:12 UTC |
	|         | --memory=4000                                                            |                   |                   |         |                     |                     |
	|         | --apiserver-port=8441                                                    |                   |                   |         |                     |                     |
	|         | --wait=all --driver=hyperv                                               |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:12 UTC | 08 Apr 25 18:15 UTC |
	|         | --alsologtostderr -v=8                                                   |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | minikube-local-cache-test:functional-873100                              |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache delete                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | minikube-local-cache-test:functional-873100                              |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |                   |         |                     |                     |
	| cache   | list                                                                     | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	| ssh     | functional-873100 ssh sudo                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | crictl images                                                            |                   |                   |         |                     |                     |
	| ssh     | functional-873100                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | ssh sudo docker rmi                                                      |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| ssh     | functional-873100 ssh                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC |                     |
	|         | sudo crictl inspecti                                                     |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache reload                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	| ssh     | functional-873100 ssh                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | sudo crictl inspecti                                                     |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| kubectl | functional-873100 kubectl --                                             | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | --context functional-873100                                              |                   |                   |         |                     |                     |
	|         | get pods                                                                 |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC |                     |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |                   |         |                     |                     |
	|         | --wait=all                                                               |                   |                   |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:16:31
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:16:31.021887    6216 out.go:345] Setting OutFile to fd 880 ...
	I0408 18:16:31.110083    6216 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:16:31.110083    6216 out.go:358] Setting ErrFile to fd 708...
	I0408 18:16:31.110083    6216 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:16:31.136248    6216 out.go:352] Setting JSON to false
	I0408 18:16:31.139306    6216 start.go:129] hostinfo: {"hostname":"minikube3","uptime":96976,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:16:31.139306    6216 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:16:31.143307    6216 out.go:177] * [functional-873100] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:16:31.145632    6216 notify.go:220] Checking for updates...
	I0408 18:16:31.146590    6216 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:16:31.148476    6216 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:16:31.151774    6216 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:16:31.154692    6216 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:16:31.157022    6216 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:16:31.161091    6216 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:16:31.161483    6216 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 18:16:36.628311    6216 out.go:177] * Using the hyperv driver based on existing profile
	I0408 18:16:36.633898    6216 start.go:297] selected driver: hyperv
	I0408 18:16:36.633898    6216 start.go:901] validating driver "hyperv" against &{Name:functional-873100 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:4000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clust
erName:functional-873100 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.46.213 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docke
r MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:16:36.634100    6216 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 18:16:36.684586    6216 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:16:36.685631    6216 cni.go:84] Creating CNI manager for ""
	I0408 18:16:36.685631    6216 cni.go:158] "hyperv" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
	I0408 18:16:36.685631    6216 start.go:340] cluster config:
	{Name:functional-873100 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:4000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:functional-873100 Namespace:default APIServerHAVIP: APIServerName:minikub
eCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.46.213 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker
MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:16:36.685631    6216 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:16:36.689336    6216 out.go:177] * Starting "functional-873100" primary control-plane node in "functional-873100" cluster
	I0408 18:16:36.692419    6216 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:16:36.692419    6216 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 18:16:36.693415    6216 cache.go:56] Caching tarball of preloaded images
	I0408 18:16:36.693415    6216 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:16:36.693415    6216 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:16:36.693415    6216 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\functional-873100\config.json ...
	I0408 18:16:36.696369    6216 start.go:360] acquireMachinesLock for functional-873100: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:16:36.696369    6216 start.go:364] duration metric: took 0s to acquireMachinesLock for "functional-873100"
	I0408 18:16:36.696369    6216 start.go:96] Skipping create...Using existing machine configuration
	I0408 18:16:36.696369    6216 fix.go:54] fixHost starting: 
	I0408 18:16:36.696369    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:39.472290    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:39.473318    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:39.473635    6216 fix.go:112] recreateIfNeeded on functional-873100: state=Running err=<nil>
	W0408 18:16:39.473635    6216 fix.go:138] unexpected machine state, will restart: <nil>
	I0408 18:16:39.476849    6216 out.go:177] * Updating the running hyperv "functional-873100" VM ...
	I0408 18:16:39.480384    6216 machine.go:93] provisionDockerMachine start ...
	I0408 18:16:39.480384    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:41.660684    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:41.660684    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:41.660790    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:44.227554    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:44.227554    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:44.233819    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:44.234582    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:44.234582    6216 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:16:44.372652    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-873100
	
	I0408 18:16:44.372820    6216 buildroot.go:166] provisioning hostname "functional-873100"
	I0408 18:16:44.372896    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:49.081094    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:49.081094    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:49.086652    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:49.087319    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:49.087319    6216 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-873100 && echo "functional-873100" | sudo tee /etc/hostname
	I0408 18:16:49.253854    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-873100
	
	I0408 18:16:49.253854    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:51.474378    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:51.475238    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:51.475291    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:54.044461    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:54.044461    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:54.050383    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:54.051127    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:54.051127    6216 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-873100' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-873100/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-873100' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:16:54.194433    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:16:54.194433    6216 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:16:54.194433    6216 buildroot.go:174] setting up certificates
	I0408 18:16:54.194433    6216 provision.go:84] configureAuth start
	I0408 18:16:54.194433    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:58.957856    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:58.957856    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:58.958462    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:01.106530    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:01.107533    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:01.107638    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:03.665247    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:03.665247    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:03.665247    6216 provision.go:143] copyHostCerts
	I0408 18:17:03.665955    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:17:03.665955    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:17:03.666390    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:17:03.667888    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:17:03.667888    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:17:03.668325    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:17:03.669528    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:17:03.669604    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:17:03.670008    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:17:03.670764    6216 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.functional-873100 san=[127.0.0.1 172.22.46.213 functional-873100 localhost minikube]
	I0408 18:17:03.745755    6216 provision.go:177] copyRemoteCerts
	I0408 18:17:03.756671    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:17:03.756671    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:08.534412    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:08.535135    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:08.535250    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:08.637448    6216 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.8807374s)
	I0408 18:17:08.637982    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:17:08.682982    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1220 bytes)
	I0408 18:17:08.736779    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 18:17:08.784297    6216 provision.go:87] duration metric: took 14.5897473s to configureAuth
	I0408 18:17:08.784297    6216 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:17:08.785070    6216 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:17:08.785070    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:10.945671    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:10.945671    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:10.946380    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:13.561057    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:13.561658    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:13.569789    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:13.569789    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:13.569789    6216 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:17:13.733212    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:17:13.733311    6216 buildroot.go:70] root file system type: tmpfs
	I0408 18:17:13.733511    6216 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:17:13.733599    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:15.913111    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:15.914277    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:15.914356    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:18.525466    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:18.525466    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:18.530458    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:18.531119    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:18.531119    6216 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:17:18.697820    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:17:18.697820    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:23.478351    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:23.479383    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:23.486093    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:23.486192    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:23.486192    6216 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:17:23.637970    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:17:23.637999    6216 machine.go:96] duration metric: took 44.1572332s to provisionDockerMachine
	I0408 18:17:23.637999    6216 start.go:293] postStartSetup for "functional-873100" (driver="hyperv")
	I0408 18:17:23.637999    6216 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:17:23.650712    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:17:23.650712    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:25.838240    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:25.838240    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:25.839006    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:28.426608    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:28.426608    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:28.427239    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:28.542320    6216 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8914882s)
	I0408 18:17:28.552476    6216 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:17:28.560034    6216 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:17:28.560034    6216 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:17:28.560034    6216 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:17:28.561202    6216 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:17:28.562575    6216 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\test\nested\copy\7904\hosts -> hosts in /etc/test/nested/copy/7904
	I0408 18:17:28.574228    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/7904
	I0408 18:17:28.594750    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:17:28.643651    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\test\nested\copy\7904\hosts --> /etc/test/nested/copy/7904/hosts (40 bytes)
	I0408 18:17:28.693612    6216 start.go:296] duration metric: took 5.0555717s for postStartSetup
	I0408 18:17:28.693612    6216 fix.go:56] duration metric: took 51.9968266s for fixHost
	I0408 18:17:28.693612    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:30.853257    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:30.853257    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:30.854166    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:33.460251    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:33.460251    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:33.465198    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:33.465944    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:33.465944    6216 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:17:33.607112    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744136253.610857190
	
	I0408 18:17:33.607112    6216 fix.go:216] guest clock: 1744136253.610857190
	I0408 18:17:33.607112    6216 fix.go:229] Guest: 2025-04-08 18:17:33.61085719 +0000 UTC Remote: 2025-04-08 18:17:28.693612 +0000 UTC m=+57.786988401 (delta=4.91724519s)
	I0408 18:17:33.607244    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:38.347997    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:38.347997    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:38.354045    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:38.354602    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:38.354602    6216 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744136253
	I0408 18:17:38.515883    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:17:33 UTC 2025
	
	I0408 18:17:38.515883    6216 fix.go:236] clock set: Tue Apr  8 18:17:33 UTC 2025
	 (err=<nil>)
	I0408 18:17:38.515883    6216 start.go:83] releasing machines lock for "functional-873100", held for 1m1.8190194s
	I0408 18:17:38.516256    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:40.697845    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:40.697845    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:40.698508    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:43.246296    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:43.246296    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:43.250147    6216 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:17:43.250279    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:43.259990    6216 ssh_runner.go:195] Run: cat /version.json
	I0408 18:17:43.259990    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:45.489695    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:45.489695    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:45.489853    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:48.266232    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:48.266232    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:48.266232    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:48.299617    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:48.299617    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:48.299723    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:48.366194    6216 ssh_runner.go:235] Completed: cat /version.json: (5.1061106s)
	I0408 18:17:48.379576    6216 ssh_runner.go:195] Run: systemctl --version
	I0408 18:17:48.384571    6216 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1343833s)
	W0408 18:17:48.384571    6216 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:17:48.401258    6216 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	W0408 18:17:48.411391    6216 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:17:48.423739    6216 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:17:48.443315    6216 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0408 18:17:48.443315    6216 start.go:495] detecting cgroup driver to use...
	I0408 18:17:48.444326    6216 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:17:48.493985    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 18:17:48.497006    6216 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:17:48.497006    6216 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:17:48.525175    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:17:48.545264    6216 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:17:48.556065    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:17:48.585977    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:17:48.619596    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:17:48.655324    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:17:48.686019    6216 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:17:48.721717    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:17:48.751706    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:17:48.784088    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:17:48.816035    6216 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:17:48.846901    6216 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:17:48.874989    6216 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:17:49.133526    6216 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:17:49.169930    6216 start.go:495] detecting cgroup driver to use...
	I0408 18:17:49.181273    6216 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:17:49.216427    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:17:49.251912    6216 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:17:49.303361    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:17:49.347572    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:17:49.371335    6216 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:17:49.420257    6216 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:17:49.439816    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:17:49.458000    6216 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:17:49.503791    6216 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:17:49.766456    6216 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:17:50.009336    6216 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:17:50.009619    6216 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:17:50.069094    6216 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:17:50.331199    6216 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:19:01.817611    6216 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m11.4858402s)
	I0408 18:19:01.830434    6216 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 18:19:01.919528    6216 out.go:201] 
	W0408 18:19:01.924846    6216 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 18:11:02 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.285816228Z" level=info msg="Starting up"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.289021896Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.290101719Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=670
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.322435808Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347849049Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347887650Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347962351Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347977152Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348052953Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348147555Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348453962Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348542564Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348563964Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348576164Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348740168Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348978073Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352161941Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352329444Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352574050Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352739853Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352975458Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.353142862Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395578165Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395728468Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395895572Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395970374Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396127877Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396275880Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396864493Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397074097Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397170899Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397192300Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397240201Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397259601Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397430305Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397477306Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397539007Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397562207Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397577108Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397589608Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397664010Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397683910Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397706711Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397725411Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397755912Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397770712Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397798113Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397810513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397829513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397844513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397863114Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397875814Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397889314Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397907315Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397966416Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397981016Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397993517Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398058618Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398101519Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398132320Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398145220Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398155620Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398173220Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398185821Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398570329Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398732232Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398856035Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398896236Z" level=info msg="containerd successfully booted in 0.077821s"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.376333182Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.410435463Z" level=info msg="Loading containers: start."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.571607280Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.807909023Z" level=info msg="Loading containers: done."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830489939Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830838545Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830930947Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.831128451Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935123166Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935406471Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:03 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:35 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.010027367Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011810864Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011907464Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012093263Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012823862Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:36 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:36 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:36 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.061851643Z" level=info msg="Starting up"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.062875641Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.064586139Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1095
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.097709788Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124314947Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124473046Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124594646Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124721546Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124760546Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124775346Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125024645Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125117445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125145445Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125157745Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125186045Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125439445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128115041Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128211241Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130570137Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130668237Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130705437Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130727137Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130983936Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131041436Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131060936Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131082736Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131098536Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131146736Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131650035Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131810435Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131851435Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131867735Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131882735Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131904335Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131919335Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131933935Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131953335Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131967935Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131980635Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131992735Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132013135Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132027435Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132040035Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132052835Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132065735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132078335Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132090735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132103535Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132117134Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132134734Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132147034Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132158934Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132170634Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132186134Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132206534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132321534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132344034Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132462234Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132554934Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132573434Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132587634Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132597834Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132611534Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132622034Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132844333Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132919533Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132985233Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.133024633Z" level=info msg="containerd successfully booted in 0.036423s"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.119414410Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.141280877Z" level=info msg="Loading containers: start."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.279212864Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.402609773Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.519783192Z" level=info msg="Loading containers: done."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541830658Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541960058Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:37 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.581977096Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.582153396Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:47 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.193154760Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195095757Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195718756Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195932156Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.196103555Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:48 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:48 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:48 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.248289531Z" level=info msg="Starting up"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.249171830Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.250091828Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1457
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.280748981Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306283342Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306317242Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306364141Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306378641Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306400741Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306429041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306579241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306704441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306725241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306738841Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306794041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306916441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309432637Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309562037Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309828436Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309934036Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309963936Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309986136Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310391735Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310530535Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310596235Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310629135Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310692935Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310788435Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311178234Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311419334Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311575233Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311616433Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311728833Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311753533Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311769133Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311800133Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311818133Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311833133Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311862633Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311876433Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311915033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311929233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311944433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311958033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311970933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312000233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312012233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312024733Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312053133Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312066933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312078833Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312090433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312101633Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312117333Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312137933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312150433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312162833Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312220532Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312404232Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312550932Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312587832Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312598432Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312661132Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312681232Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313231631Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313512930Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313641230Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313684230Z" level=info msg="containerd successfully booted in 0.034228s"
	Apr 08 18:11:49 functional-873100 dockerd[1451]: time="2025-04-08T18:11:49.296381813Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.302038774Z" level=info msg="Loading containers: start."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.431387574Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.542525702Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.658559423Z" level=info msg="Loading containers: done."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689372876Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689573575Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728601715Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728727315Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:52 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.816831395Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817023212Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817060615Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817526956Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840205332Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840399149Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840418851Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840593166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904521638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904962276Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905346110Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905750645Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947425577Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947659497Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947910619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.948982913Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273517988Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273663500Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273923621Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.274384159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386027070Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386105976Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386123978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386316893Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491472175Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491940813Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.492151831Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.495654317Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503416550Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503579963Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504120507Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504549242Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702219637Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702344541Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702359842Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.708973385Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972305473Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972723788Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972915195Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.974488853Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.122409705Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124796787Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124979393Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.127466878Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224670622Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224748824Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224794626Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224931330Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.963794103Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965137848Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965192850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965355156Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022501487Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022595286Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022617086Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.023328883Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.794896489Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795055488Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795071088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795904285Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083642645Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083725345Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083740345Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.084285643Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.190627079Z" level=info msg="ignoring event" container=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191449376Z" level=info msg="shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191544976Z" level=warning msg="cleaning up after shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191554576Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.375599152Z" level=info msg="shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.375617052Z" level=info msg="ignoring event" container=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376550948Z" level=warning msg="cleaning up after shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376624948Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:12 functional-873100 dockerd[1451]: time="2025-04-08T18:14:12.779941913Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:14:12 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.049546353Z" level=info msg="ignoring event" container=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.053619844Z" level=info msg="shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056433738Z" level=warning msg="cleaning up after shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056714338Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.057422336Z" level=info msg="ignoring event" container=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058557534Z" level=info msg="shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058756533Z" level=warning msg="cleaning up after shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058914733Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080404288Z" level=info msg="shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.080552188Z" level=info msg="ignoring event" container=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080578588Z" level=warning msg="cleaning up after shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080688988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.091796765Z" level=info msg="shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092127964Z" level=warning msg="cleaning up after shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.092400564Z" level=info msg="ignoring event" container=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092802263Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.094192260Z" level=info msg="ignoring event" container=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.095204558Z" level=info msg="shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.096325355Z" level=warning msg="cleaning up after shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.097089854Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.107881831Z" level=info msg="ignoring event" container=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.108023331Z" level=info msg="ignoring event" container=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.107284333Z" level=info msg="shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109336728Z" level=warning msg="cleaning up after shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109635628Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123141600Z" level=info msg="shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123231799Z" level=warning msg="cleaning up after shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123281399Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.140994763Z" level=info msg="ignoring event" container=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141039262Z" level=info msg="ignoring event" container=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141064662Z" level=info msg="ignoring event" container=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.154970933Z" level=info msg="shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158618226Z" level=warning msg="cleaning up after shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160237223Z" level=info msg="ignoring event" container=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160291122Z" level=info msg="ignoring event" container=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159160825Z" level=info msg="shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161423220Z" level=warning msg="cleaning up after shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159002925Z" level=info msg="shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184316372Z" level=warning msg="cleaning up after shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184601572Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.181909277Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161173321Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.156581930Z" level=info msg="shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200229939Z" level=warning msg="cleaning up after shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200392739Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158867925Z" level=info msg="shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207020825Z" level=warning msg="cleaning up after shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207082925Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1451]: time="2025-04-08T18:14:18.018818225Z" level=info msg="ignoring event" container=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019424024Z" level=info msg="shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019904223Z" level=warning msg="cleaning up after shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.020413522Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.902208957Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.948884062Z" level=info msg="ignoring event" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949176064Z" level=info msg="shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949669067Z" level=warning msg="cleaning up after shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949710067Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011786167Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011902768Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011929568Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.012461671Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:14:24 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Consumed 5.669s CPU time.
	Apr 08 18:14:24 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.068343469Z" level=info msg="Starting up"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.069677076Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.070729582Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=4365
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.100696746Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126107485Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126196286Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126232186Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126245486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126268786Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126279486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126454287Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126574788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126593088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126603888Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126634788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126741089Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129837506Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129932306Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130405209Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130549410Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130614310Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130636510Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130875412Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130941212Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130957112Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131033513Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131058113Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131119613Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131318314Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131609316Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131702516Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131719916Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131730916Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131741816Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131752716Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131764017Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131775117Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131793717Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131805917Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131816117Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131833817Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131846517Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131857117Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131868417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131878417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131889417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131905217Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131917717Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131928417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131940317Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131951018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131961018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131970918Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132090118Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132112418Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132124718Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132134419Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132185119Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132206419Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132217619Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132228719Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132237919Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132248619Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132258319Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132570921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132663921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132859123Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132897823Z" level=info msg="containerd successfully booted in 0.033088s"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.116284488Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.164259928Z" level=info msg="Loading containers: start."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.397701599Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.522205623Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.639097209Z" level=info msg="Loading containers: done."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663639532Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663772733Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.702622627Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.703771933Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:14:25 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.332794698Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.336431912Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339499324Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339700424Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.340027126Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346149349Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346777551Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.348561258Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570602894Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570863695Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570947695Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.571185596Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658587125Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658827926Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658936527Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.659228328Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.703761195Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704154597Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704321797Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704758199Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735296714Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735429615Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735452515Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735781416Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.870831224Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871015625Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871100025Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871268526Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041786953Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041857554Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041871054Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.043550159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.292833507Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293100408Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293297408Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.294164111Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747736954Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747874254Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747890754Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.759064792Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783055174Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783139474Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783155474Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783261874Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.801445836Z" level=info msg="ignoring event" container=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802733841Z" level=info msg="shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802966241Z" level=warning msg="cleaning up after shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.803082242Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.809900565Z" level=info msg="ignoring event" container=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810781768Z" level=info msg="shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810912668Z" level=warning msg="cleaning up after shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.811062969Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.825885519Z" level=info msg="ignoring event" container=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.827939026Z" level=info msg="ignoring event" container=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.828877629Z" level=info msg="shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829460931Z" level=warning msg="cleaning up after shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829749532Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829121630Z" level=info msg="shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.831057437Z" level=warning msg="cleaning up after shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.885542622Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.886211624Z" level=info msg="shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887807230Z" level=warning msg="cleaning up after shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887863730Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.907464597Z" level=info msg="ignoring event" container=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.946953731Z" level=info msg="shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947152032Z" level=warning msg="cleaning up after shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947260432Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.958927872Z" level=info msg="shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.959093972Z" level=warning msg="cleaning up after shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.975088427Z" level=info msg="ignoring event" container=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.976303131Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.979795943Z" level=info msg="ignoring event" container=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.974915726Z" level=info msg="shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988113471Z" level=warning msg="cleaning up after shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988215471Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.045577851Z" level=info msg="shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.046177453Z" level=warning msg="cleaning up after shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.057572988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.133079619Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to remove runc container\" error=\"runc did not terminate successfully: exit status 255: \" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.166032719Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.167119423Z" level=error msg="copy shim log" error="read /proc/self/fd/48: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175573749Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175584449Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.203653735Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.204855538Z" level=error msg="copy shim log" error="read /proc/self/fd/50: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208656650Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208769450Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.341966357Z" level=error msg="collecting stats for container /k8s_storage-provisioner_storage-provisioner_kube-system_4be23f5b-6dcb-49ce-ac59-cfc1301fdbee_1: invalid id: "
	Apr 08 18:14:30 functional-873100 dockerd[4359]: 2025/04/08 18:14:30 http: superfluous response.WriteHeader call from go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp.(*respWriterWrapper).WriteHeader (wrap.go:98)
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.541261167Z" level=info msg="ignoring event" container=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542776071Z" level=info msg="shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542827172Z" level=warning msg="cleaning up after shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542837372Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.566194643Z" level=info msg="ignoring event" container=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566032643Z" level=info msg="shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566907745Z" level=warning msg="cleaning up after shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.567603047Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835197266Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835287766Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835327966Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835459166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911686499Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911805000Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911955900Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.912386202Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101356747Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101551148Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101662048Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101882249Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415314838Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415450438Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415603738Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415976039Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474268465Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474348065Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474365065Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474507465Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.482192682Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483364584Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483571085Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.484600587Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526458677Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526763178Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526813978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526935978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133647850Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133723750Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133738750Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133835850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.286971612Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.287313112Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.290652915Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.294746619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366454090Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366574390Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366587390Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366700890Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396505520Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396902520Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.397144620Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.399433222Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727525851Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727736952Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727833252Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.728181052Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.850915184Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852051185Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852067185Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852166785Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.064848638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065394739Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065518940Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.066853044Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:17:50 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.365807392Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568261688Z" level=info msg="shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568876587Z" level=warning msg="cleaning up after shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568997887Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.589148847Z" level=info msg="ignoring event" container=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623443078Z" level=info msg="shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623584378Z" level=warning msg="cleaning up after shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623887877Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.625252675Z" level=info msg="ignoring event" container=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.627785270Z" level=info msg="ignoring event" container=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627830469Z" level=info msg="shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627921669Z" level=warning msg="cleaning up after shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627934569Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.642668740Z" level=info msg="ignoring event" container=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644475036Z" level=info msg="shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644564436Z" level=warning msg="cleaning up after shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644577436Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.683973658Z" level=info msg="ignoring event" container=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686629852Z" level=info msg="shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686863652Z" level=warning msg="cleaning up after shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686935352Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.689544746Z" level=info msg="ignoring event" container=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691461743Z" level=info msg="shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691757842Z" level=warning msg="cleaning up after shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691850042Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698043329Z" level=info msg="shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698224529Z" level=warning msg="cleaning up after shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.699562326Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701518623Z" level=info msg="shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701583722Z" level=warning msg="cleaning up after shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701597022Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.704846016Z" level=info msg="shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706195213Z" level=info msg="ignoring event" container=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706384813Z" level=info msg="ignoring event" container=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706434613Z" level=info msg="ignoring event" container=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706453213Z" level=info msg="ignoring event" container=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708810608Z" level=warning msg="cleaning up after shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708891008Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.706709612Z" level=info msg="shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711050604Z" level=warning msg="cleaning up after shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711111803Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723156879Z" level=info msg="ignoring event" container=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723218679Z" level=info msg="ignoring event" container=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728637968Z" level=info msg="shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728820868Z" level=warning msg="cleaning up after shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728838268Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739136148Z" level=info msg="shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739219747Z" level=warning msg="cleaning up after shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739233947Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4359]: time="2025-04-08T18:17:55.602976948Z" level=info msg="ignoring event" container=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605413243Z" level=info msg="shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605936142Z" level=warning msg="cleaning up after shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.606055942Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.606652814Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.663136604Z" level=info msg="ignoring event" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663487579Z" level=info msg="shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663555775Z" level=warning msg="cleaning up after shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663584972Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733154233Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733206530Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733487510Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733578803Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:18:01 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Consumed 9.706s CPU time.
	Apr 08 18:18:01 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:18:01 functional-873100 dockerd[8415]: time="2025-04-08T18:18:01.793220155Z" level=info msg="Starting up"
	Apr 08 18:19:01 functional-873100 dockerd[8415]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 18:19:01 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 18:19:01.925950    6216 out.go:270] * 
	W0408 18:19:01.927406    6216 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 18:19:01.932212    6216 out.go:201] 
	
	
	==> Docker <==
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Unable to get docker version: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:20:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:20:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:20:02 functional-873100 systemd[1]: docker.service: Scheduled restart job, restart counter is at 2.
	Apr 08 18:20:02 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:20:02 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:20:04Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[  +5.449904] systemd-fstab-generator[2412]: Ignoring "noauto" option for root device
	[  +0.203006] kauditd_printk_skb: 12 callbacks suppressed
	[  +8.232409] kauditd_printk_skb: 88 callbacks suppressed
	[ +28.267561] kauditd_printk_skb: 10 callbacks suppressed
	[Apr 8 18:14] systemd-fstab-generator[3877]: Ignoring "noauto" option for root device
	[  +0.672076] systemd-fstab-generator[3911]: Ignoring "noauto" option for root device
	[  +0.280795] systemd-fstab-generator[3923]: Ignoring "noauto" option for root device
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	
	
	==> kernel <==
	 18:21:02 up 11 min,  0 users,  load average: 0.00, 0.12, 0.11
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:21:01 functional-873100 kubelet[5867]: E0408 18:21:01.319234    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416376    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416437    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416467    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416496    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416509    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416573    5867 kubelet.go:3018] "Container runtime not ready" runtimeReady="RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.416604    5867 log.go:32] "Version from runtime service failed" err="rpc error: code = Unknown desc = failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: I0408 18:21:02.416692    5867 setters.go:602] "Node became not ready" node="functional-873100" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-04-08T18:21:02Z","lastTransitionTime":"2025-04-08T18:21:02Z","reason":"KubeletNotReady","message":"[container runtime is down, PLEG is not healthy: pleg was last seen active 3m12.897165812s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"}
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.418623    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.419841    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.419112    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:21:02Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:21:02Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:21:02Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:21:02Z\\\",\\\"lastTransitionTime\\\":\\\"2025-04-08T18:21:02Z\\\",\\\"message\\\":\\\"[container runtime is down, PLEG is not healthy: pleg was last seen active 3m12.897165812s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to g
et docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"containerRuntimeVersion\\\":\\\"docker://Unknown\\\"}}}\" for node \"functional-873100\": Patch \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100/status?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.419588    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.419996    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.419806    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.420023    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: I0408 18:21:02.420035    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.420475    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.420554    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.420826    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.421220    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.422479    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.429181    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.431881    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:21:02 functional-873100 kubelet[5867]: E0408 18:21:02.431938    5867 kubelet_node_status.go:536] "Unable to update node status" err="update node status exceeds retry count"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:20:02.072318    3200 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.107415    3200 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.139195    3200 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.174416    3200 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.208972    3200 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.245311    3200 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.278971    3200 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:20:02.310779    3200 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.1980431s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/serial/ExtraConfig (284.39s)

                                                
                                    
x
+
TestFunctional/serial/ComponentHealth (180.66s)

                                                
                                                
=== RUN   TestFunctional/serial/ComponentHealth
functional_test.go:827: (dbg) Run:  kubectl --context functional-873100 get po -l tier=control-plane -n kube-system -o=json
functional_test.go:827: (dbg) Non-zero exit: kubectl --context functional-873100 get po -l tier=control-plane -n kube-system -o=json: exit status 1 (2.1895517s)

                                                
                                                
-- stdout --
	{
	    "apiVersion": "v1",
	    "items": [],
	    "kind": "List",
	    "metadata": {
	        "resourceVersion": ""
	    }
	}

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:829: failed to get components. args "kubectl --context functional-873100 get po -l tier=control-plane -n kube-system -o=json": exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.0027116s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/serial/ComponentHealth FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/serial/ComponentHealth]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
E0408 18:21:52.220873    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:23:15.298602    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (2m33.8957833s)
helpers_test.go:252: TestFunctional/serial/ComponentHealth logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| Command |                                   Args                                   |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| unpause | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:07 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | unpause                                                                  |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:07 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| stop    | nospam-514800 --log_dir                                                  | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:08 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800              |                   |                   |         |                     |                     |
	|         | stop                                                                     |                   |                   |         |                     |                     |
	| delete  | -p nospam-514800                                                         | nospam-514800     | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:08 UTC | 08 Apr 25 18:09 UTC |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:09 UTC | 08 Apr 25 18:12 UTC |
	|         | --memory=4000                                                            |                   |                   |         |                     |                     |
	|         | --apiserver-port=8441                                                    |                   |                   |         |                     |                     |
	|         | --wait=all --driver=hyperv                                               |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:12 UTC | 08 Apr 25 18:15 UTC |
	|         | --alsologtostderr -v=8                                                   |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache add                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | minikube-local-cache-test:functional-873100                              |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache delete                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | minikube-local-cache-test:functional-873100                              |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | registry.k8s.io/pause:3.3                                                |                   |                   |         |                     |                     |
	| cache   | list                                                                     | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	| ssh     | functional-873100 ssh sudo                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | crictl images                                                            |                   |                   |         |                     |                     |
	| ssh     | functional-873100                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC | 08 Apr 25 18:15 UTC |
	|         | ssh sudo docker rmi                                                      |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| ssh     | functional-873100 ssh                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:15 UTC |                     |
	|         | sudo crictl inspecti                                                     |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | functional-873100 cache reload                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	| ssh     | functional-873100 ssh                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | sudo crictl inspecti                                                     |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:3.1                                                |                   |                   |         |                     |                     |
	| cache   | delete                                                                   | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:latest                                             |                   |                   |         |                     |                     |
	| kubectl | functional-873100 kubectl --                                             | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | --context functional-873100                                              |                   |                   |         |                     |                     |
	|         | get pods                                                                 |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC |                     |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision |                   |                   |         |                     |                     |
	|         | --wait=all                                                               |                   |                   |         |                     |                     |
	|---------|--------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:16:31
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:16:31.021887    6216 out.go:345] Setting OutFile to fd 880 ...
	I0408 18:16:31.110083    6216 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:16:31.110083    6216 out.go:358] Setting ErrFile to fd 708...
	I0408 18:16:31.110083    6216 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:16:31.136248    6216 out.go:352] Setting JSON to false
	I0408 18:16:31.139306    6216 start.go:129] hostinfo: {"hostname":"minikube3","uptime":96976,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:16:31.139306    6216 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:16:31.143307    6216 out.go:177] * [functional-873100] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:16:31.145632    6216 notify.go:220] Checking for updates...
	I0408 18:16:31.146590    6216 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:16:31.148476    6216 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:16:31.151774    6216 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:16:31.154692    6216 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:16:31.157022    6216 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:16:31.161091    6216 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:16:31.161483    6216 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 18:16:36.628311    6216 out.go:177] * Using the hyperv driver based on existing profile
	I0408 18:16:36.633898    6216 start.go:297] selected driver: hyperv
	I0408 18:16:36.633898    6216 start.go:901] validating driver "hyperv" against &{Name:functional-873100 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:4000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clust
erName:functional-873100 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.46.213 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docke
r MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:16:36.634100    6216 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 18:16:36.684586    6216 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:16:36.685631    6216 cni.go:84] Creating CNI manager for ""
	I0408 18:16:36.685631    6216 cni.go:158] "hyperv" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
	I0408 18:16:36.685631    6216 start.go:340] cluster config:
	{Name:functional-873100 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:4000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8441 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:functional-873100 Namespace:default APIServerHAVIP: APIServerName:minikub
eCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[{Component:apiserver Key:enable-admission-plugins Value:NamespaceAutoProvision}] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.46.213 Port:8441 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker
MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:16:36.685631    6216 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:16:36.689336    6216 out.go:177] * Starting "functional-873100" primary control-plane node in "functional-873100" cluster
	I0408 18:16:36.692419    6216 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:16:36.692419    6216 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 18:16:36.693415    6216 cache.go:56] Caching tarball of preloaded images
	I0408 18:16:36.693415    6216 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:16:36.693415    6216 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:16:36.693415    6216 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\functional-873100\config.json ...
	I0408 18:16:36.696369    6216 start.go:360] acquireMachinesLock for functional-873100: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:16:36.696369    6216 start.go:364] duration metric: took 0s to acquireMachinesLock for "functional-873100"
	I0408 18:16:36.696369    6216 start.go:96] Skipping create...Using existing machine configuration
	I0408 18:16:36.696369    6216 fix.go:54] fixHost starting: 
	I0408 18:16:36.696369    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:39.472290    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:39.473318    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:39.473635    6216 fix.go:112] recreateIfNeeded on functional-873100: state=Running err=<nil>
	W0408 18:16:39.473635    6216 fix.go:138] unexpected machine state, will restart: <nil>
	I0408 18:16:39.476849    6216 out.go:177] * Updating the running hyperv "functional-873100" VM ...
	I0408 18:16:39.480384    6216 machine.go:93] provisionDockerMachine start ...
	I0408 18:16:39.480384    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:41.660684    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:41.660684    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:41.660790    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:44.227554    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:44.227554    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:44.233819    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:44.234582    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:44.234582    6216 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:16:44.372652    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-873100
	
	I0408 18:16:44.372820    6216 buildroot.go:166] provisioning hostname "functional-873100"
	I0408 18:16:44.372896    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:46.525464    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:49.081094    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:49.081094    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:49.086652    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:49.087319    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:49.087319    6216 main.go:141] libmachine: About to run SSH command:
	sudo hostname functional-873100 && echo "functional-873100" | sudo tee /etc/hostname
	I0408 18:16:49.253854    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: functional-873100
	
	I0408 18:16:49.253854    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:51.474378    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:51.475238    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:51.475291    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:54.044461    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:54.044461    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:54.050383    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:16:54.051127    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:16:54.051127    6216 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sfunctional-873100' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 functional-873100/g' /etc/hosts;
				else 
					echo '127.0.1.1 functional-873100' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:16:54.194433    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:16:54.194433    6216 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:16:54.194433    6216 buildroot.go:174] setting up certificates
	I0408 18:16:54.194433    6216 provision.go:84] configureAuth start
	I0408 18:16:54.194433    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:56.351196    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:16:58.957856    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:16:58.957856    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:16:58.958462    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:01.106530    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:01.107533    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:01.107638    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:03.665247    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:03.665247    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:03.665247    6216 provision.go:143] copyHostCerts
	I0408 18:17:03.665955    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:17:03.665955    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:17:03.666390    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:17:03.667888    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:17:03.667888    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:17:03.668325    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:17:03.669528    6216 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:17:03.669604    6216 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:17:03.670008    6216 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:17:03.670764    6216 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.functional-873100 san=[127.0.0.1 172.22.46.213 functional-873100 localhost minikube]
	I0408 18:17:03.745755    6216 provision.go:177] copyRemoteCerts
	I0408 18:17:03.756671    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:17:03.756671    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:05.953496    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:08.534412    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:08.535135    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:08.535250    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:08.637448    6216 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.8807374s)
	I0408 18:17:08.637982    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:17:08.682982    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1220 bytes)
	I0408 18:17:08.736779    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 18:17:08.784297    6216 provision.go:87] duration metric: took 14.5897473s to configureAuth
	I0408 18:17:08.784297    6216 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:17:08.785070    6216 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:17:08.785070    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:10.945671    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:10.945671    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:10.946380    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:13.561057    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:13.561658    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:13.569789    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:13.569789    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:13.569789    6216 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:17:13.733212    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:17:13.733311    6216 buildroot.go:70] root file system type: tmpfs
	I0408 18:17:13.733511    6216 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:17:13.733599    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:15.913111    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:15.914277    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:15.914356    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:18.525466    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:18.525466    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:18.530458    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:18.531119    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:18.531119    6216 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:17:18.697820    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:17:18.697820    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:20.856593    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:23.478351    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:23.479383    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:23.486093    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:23.486192    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:23.486192    6216 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:17:23.637970    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:17:23.637999    6216 machine.go:96] duration metric: took 44.1572332s to provisionDockerMachine
	I0408 18:17:23.637999    6216 start.go:293] postStartSetup for "functional-873100" (driver="hyperv")
	I0408 18:17:23.637999    6216 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:17:23.650712    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:17:23.650712    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:25.838240    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:25.838240    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:25.839006    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:28.426608    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:28.426608    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:28.427239    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:28.542320    6216 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8914882s)
	I0408 18:17:28.552476    6216 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:17:28.560034    6216 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:17:28.560034    6216 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:17:28.560034    6216 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:17:28.561202    6216 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:17:28.562575    6216 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\test\nested\copy\7904\hosts -> hosts in /etc/test/nested/copy/7904
	I0408 18:17:28.574228    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs /etc/test/nested/copy/7904
	I0408 18:17:28.594750    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:17:28.643651    6216 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\test\nested\copy\7904\hosts --> /etc/test/nested/copy/7904/hosts (40 bytes)
	I0408 18:17:28.693612    6216 start.go:296] duration metric: took 5.0555717s for postStartSetup
	I0408 18:17:28.693612    6216 fix.go:56] duration metric: took 51.9968266s for fixHost
	I0408 18:17:28.693612    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:30.853257    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:30.853257    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:30.854166    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:33.460251    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:33.460251    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:33.465198    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:33.465944    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:33.465944    6216 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:17:33.607112    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744136253.610857190
	
	I0408 18:17:33.607112    6216 fix.go:216] guest clock: 1744136253.610857190
	I0408 18:17:33.607112    6216 fix.go:229] Guest: 2025-04-08 18:17:33.61085719 +0000 UTC Remote: 2025-04-08 18:17:28.693612 +0000 UTC m=+57.786988401 (delta=4.91724519s)
	I0408 18:17:33.607244    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:35.768398    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:38.347997    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:38.347997    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:38.354045    6216 main.go:141] libmachine: Using SSH client type: native
	I0408 18:17:38.354602    6216 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.46.213 22 <nil> <nil>}
	I0408 18:17:38.354602    6216 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744136253
	I0408 18:17:38.515883    6216 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:17:33 UTC 2025
	
	I0408 18:17:38.515883    6216 fix.go:236] clock set: Tue Apr  8 18:17:33 UTC 2025
	 (err=<nil>)
	I0408 18:17:38.515883    6216 start.go:83] releasing machines lock for "functional-873100", held for 1m1.8190194s
	I0408 18:17:38.516256    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:40.697845    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:40.697845    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:40.698508    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:43.246296    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:43.246296    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:43.250147    6216 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:17:43.250279    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:43.259990    6216 ssh_runner.go:195] Run: cat /version.json
	I0408 18:17:43.259990    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:45.488605    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:45.489695    6216 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:17:45.489695    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:45.489853    6216 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
	I0408 18:17:48.266232    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:48.266232    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:48.266232    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:48.299617    6216 main.go:141] libmachine: [stdout =====>] : 172.22.46.213
	
	I0408 18:17:48.299617    6216 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:17:48.299723    6216 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
	I0408 18:17:48.366194    6216 ssh_runner.go:235] Completed: cat /version.json: (5.1061106s)
	I0408 18:17:48.379576    6216 ssh_runner.go:195] Run: systemctl --version
	I0408 18:17:48.384571    6216 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1343833s)
	W0408 18:17:48.384571    6216 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:17:48.401258    6216 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	W0408 18:17:48.411391    6216 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:17:48.423739    6216 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:17:48.443315    6216 cni.go:259] no active bridge cni configs found in "/etc/cni/net.d" - nothing to disable
	I0408 18:17:48.443315    6216 start.go:495] detecting cgroup driver to use...
	I0408 18:17:48.444326    6216 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:17:48.493985    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 18:17:48.497006    6216 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:17:48.497006    6216 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:17:48.525175    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:17:48.545264    6216 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:17:48.556065    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:17:48.585977    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:17:48.619596    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:17:48.655324    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:17:48.686019    6216 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:17:48.721717    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:17:48.751706    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:17:48.784088    6216 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:17:48.816035    6216 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:17:48.846901    6216 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:17:48.874989    6216 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:17:49.133526    6216 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:17:49.169930    6216 start.go:495] detecting cgroup driver to use...
	I0408 18:17:49.181273    6216 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:17:49.216427    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:17:49.251912    6216 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:17:49.303361    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:17:49.347572    6216 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:17:49.371335    6216 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:17:49.420257    6216 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:17:49.439816    6216 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:17:49.458000    6216 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:17:49.503791    6216 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:17:49.766456    6216 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:17:50.009336    6216 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:17:50.009619    6216 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:17:50.069094    6216 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:17:50.331199    6216 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:19:01.817611    6216 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m11.4858402s)
	I0408 18:19:01.830434    6216 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 18:19:01.919528    6216 out.go:201] 
	W0408 18:19:01.924846    6216 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 18:11:02 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.285816228Z" level=info msg="Starting up"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.289021896Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:02 functional-873100 dockerd[664]: time="2025-04-08T18:11:02.290101719Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=670
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.322435808Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347849049Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347887650Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347962351Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.347977152Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348052953Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348147555Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348453962Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348542564Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348563964Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348576164Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348740168Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.348978073Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352161941Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352329444Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352574050Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352739853Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.352975458Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.353142862Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395578165Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395728468Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395895572Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.395970374Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396127877Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396275880Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.396864493Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397074097Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397170899Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397192300Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397240201Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397259601Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397430305Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397477306Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397539007Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397562207Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397577108Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397589608Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397664010Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397683910Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397706711Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397725411Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397755912Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397770712Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397798113Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397810513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397829513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397844513Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397863114Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397875814Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397889314Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397907315Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397966416Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397981016Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.397993517Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398058618Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398101519Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398132320Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398145220Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398155620Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398173220Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398185821Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398570329Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398732232Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398856035Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:02 functional-873100 dockerd[670]: time="2025-04-08T18:11:02.398896236Z" level=info msg="containerd successfully booted in 0.077821s"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.376333182Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.410435463Z" level=info msg="Loading containers: start."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.571607280Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.807909023Z" level=info msg="Loading containers: done."
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830489939Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830838545Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.830930947Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.831128451Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935123166Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:03 functional-873100 dockerd[664]: time="2025-04-08T18:11:03.935406471Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:03 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:35 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.010027367Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011810864Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.011907464Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012093263Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:35 functional-873100 dockerd[664]: time="2025-04-08T18:11:35.012823862Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:36 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:36 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:36 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.061851643Z" level=info msg="Starting up"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.062875641Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:36 functional-873100 dockerd[1089]: time="2025-04-08T18:11:36.064586139Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1095
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.097709788Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124314947Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124473046Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124594646Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124721546Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124760546Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.124775346Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125024645Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125117445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125145445Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125157745Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125186045Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.125439445Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128115041Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.128211241Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130570137Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130668237Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130705437Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130727137Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.130983936Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131041436Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131060936Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131082736Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131098536Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131146736Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131650035Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131810435Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131851435Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131867735Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131882735Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131904335Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131919335Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131933935Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131953335Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131967935Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131980635Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.131992735Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132013135Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132027435Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132040035Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132052835Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132065735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132078335Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132090735Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132103535Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132117134Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132134734Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132147034Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132158934Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132170634Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132186134Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132206534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132321534Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132344034Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132462234Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132554934Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132573434Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132587634Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132597834Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132611534Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132622034Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132844333Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132919533Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.132985233Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:36 functional-873100 dockerd[1095]: time="2025-04-08T18:11:36.133024633Z" level=info msg="containerd successfully booted in 0.036423s"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.119414410Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.141280877Z" level=info msg="Loading containers: start."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.279212864Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.402609773Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.519783192Z" level=info msg="Loading containers: done."
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541830658Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.541960058Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:37 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.581977096Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:37 functional-873100 dockerd[1089]: time="2025-04-08T18:11:37.582153396Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:47 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.193154760Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195095757Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195718756Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.195932156Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:11:47 functional-873100 dockerd[1089]: time="2025-04-08T18:11:47.196103555Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:11:48 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:11:48 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:11:48 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.248289531Z" level=info msg="Starting up"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.249171830Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:11:48 functional-873100 dockerd[1451]: time="2025-04-08T18:11:48.250091828Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=1457
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.280748981Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306283342Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306317242Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306364141Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306378641Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306400741Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306429041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306579241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306704441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306725241Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306738841Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306794041Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.306916441Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309432637Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309562037Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309828436Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309934036Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309963936Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.309986136Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310391735Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310530535Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310596235Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310629135Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310692935Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.310788435Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311178234Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311419334Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311575233Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311616433Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311728833Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311753533Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311769133Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311800133Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311818133Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311833133Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311862633Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311876433Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311915033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311929233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311944433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311958033Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.311970933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312000233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312012233Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312024733Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312053133Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312066933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312078833Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312090433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312101633Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312117333Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312137933Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312150433Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312162833Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312220532Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312404232Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312550932Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312587832Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312598432Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312661132Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.312681232Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313231631Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313512930Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313641230Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:11:48 functional-873100 dockerd[1457]: time="2025-04-08T18:11:48.313684230Z" level=info msg="containerd successfully booted in 0.034228s"
	Apr 08 18:11:49 functional-873100 dockerd[1451]: time="2025-04-08T18:11:49.296381813Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.302038774Z" level=info msg="Loading containers: start."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.431387574Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.542525702Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.658559423Z" level=info msg="Loading containers: done."
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689372876Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.689573575Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728601715Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:11:52 functional-873100 dockerd[1451]: time="2025-04-08T18:11:52.728727315Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:11:52 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.816831395Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817023212Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817060615Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.817526956Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840205332Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840399149Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840418851Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.840593166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904521638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.904962276Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905346110Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.905750645Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947425577Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947659497Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.947910619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:01 functional-873100 dockerd[1457]: time="2025-04-08T18:12:01.948982913Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273517988Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273663500Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.273923621Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.274384159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386027070Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386105976Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386123978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.386316893Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491472175Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.491940813Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.492151831Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.495654317Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503416550Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.503579963Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504120507Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:02 functional-873100 dockerd[1457]: time="2025-04-08T18:12:02.504549242Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702219637Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702344541Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.702359842Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.708973385Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972305473Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972723788Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.972915195Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:14 functional-873100 dockerd[1457]: time="2025-04-08T18:12:14.974488853Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.122409705Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124796787Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.124979393Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.127466878Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224670622Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224748824Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224794626Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.224931330Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.963794103Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965137848Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965192850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:15 functional-873100 dockerd[1457]: time="2025-04-08T18:12:15.965355156Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022501487Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022595286Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.022617086Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:16 functional-873100 dockerd[1457]: time="2025-04-08T18:12:16.023328883Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.794896489Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795055488Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795071088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:22 functional-873100 dockerd[1457]: time="2025-04-08T18:12:22.795904285Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083642645Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083725345Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.083740345Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:23 functional-873100 dockerd[1457]: time="2025-04-08T18:12:23.084285643Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.190627079Z" level=info msg="ignoring event" container=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191449376Z" level=info msg="shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191544976Z" level=warning msg="cleaning up after shim disconnected" id=d020b310a6297ae7191fc76e558ad6951ace548c74820fdb839b4ec0c6df7ef7 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.191554576Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.375599152Z" level=info msg="shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1451]: time="2025-04-08T18:12:26.375617052Z" level=info msg="ignoring event" container=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376550948Z" level=warning msg="cleaning up after shim disconnected" id=ac1093931e54115d12d6ae6661cd0c68e13553c2036e9cdeb723fc6f95356af0 namespace=moby
	Apr 08 18:12:26 functional-873100 dockerd[1457]: time="2025-04-08T18:12:26.376624948Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:12 functional-873100 dockerd[1451]: time="2025-04-08T18:14:12.779941913Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:14:12 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.049546353Z" level=info msg="ignoring event" container=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.053619844Z" level=info msg="shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056433738Z" level=warning msg="cleaning up after shim disconnected" id=b4a6e746de8cc5f7d015bc4f11f01cbbe14827afd4cc6d902105cf219e56b38c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.056714338Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.057422336Z" level=info msg="ignoring event" container=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058557534Z" level=info msg="shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058756533Z" level=warning msg="cleaning up after shim disconnected" id=e3de1a7fc5c78233d408e71f42d4b4cd4bd18ab2e4e04414ec802a6f7ef69273 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.058914733Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080404288Z" level=info msg="shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.080552188Z" level=info msg="ignoring event" container=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080578588Z" level=warning msg="cleaning up after shim disconnected" id=bf1f2390bea52a7c1d5a612e155dba4aa1ba9a5b2a012ea1a4efbf042bea7e0e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.080688988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.091796765Z" level=info msg="shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092127964Z" level=warning msg="cleaning up after shim disconnected" id=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.092400564Z" level=info msg="ignoring event" container=2e85b4bd7ece3ca8ac4df68f97a703320b6006cefbc6722624a36d486984d6fa module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.092802263Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.094192260Z" level=info msg="ignoring event" container=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.095204558Z" level=info msg="shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.096325355Z" level=warning msg="cleaning up after shim disconnected" id=8e4a3ecb252be7c16ac955e7771245073440e27b0b2ae53ec653c855b50741ed namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.097089854Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.107881831Z" level=info msg="ignoring event" container=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.108023331Z" level=info msg="ignoring event" container=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.107284333Z" level=info msg="shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109336728Z" level=warning msg="cleaning up after shim disconnected" id=a34ccea9b9058d578a3e30e1844c79a8fd1eb9dd47a60810f8967a7dbf78794c namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.109635628Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123141600Z" level=info msg="shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123231799Z" level=warning msg="cleaning up after shim disconnected" id=c443f585f97dede5dee58f29597a04f50d7f9f2ca32ffb6f684db7baec17d401 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.123281399Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.140994763Z" level=info msg="ignoring event" container=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141039262Z" level=info msg="ignoring event" container=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.141064662Z" level=info msg="ignoring event" container=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.154970933Z" level=info msg="shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158618226Z" level=warning msg="cleaning up after shim disconnected" id=d71988bc063937f29dd85f846879554bf84c058a7c79e6f86126b61b1c46cafb namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160237223Z" level=info msg="ignoring event" container=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1451]: time="2025-04-08T18:14:13.160291122Z" level=info msg="ignoring event" container=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159160825Z" level=info msg="shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161423220Z" level=warning msg="cleaning up after shim disconnected" id=f12c4e597e4b1c1ae6a5c1941bd37a1b4defcd22723209ffb4ca0f1655329838 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.159002925Z" level=info msg="shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184316372Z" level=warning msg="cleaning up after shim disconnected" id=5648c41937840711f928c3d611c16a00a42d308bf7e285a8cff6a1abfc52e31f namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.184601572Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.181909277Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.161173321Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.156581930Z" level=info msg="shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200229939Z" level=warning msg="cleaning up after shim disconnected" id=f854a92d7b4d8751c763125cd77837350ff790a776aed73f784b6b23733c886e namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.200392739Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.158867925Z" level=info msg="shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207020825Z" level=warning msg="cleaning up after shim disconnected" id=4a5302521d9fc5fa5132e4d80c7289675dd38ec7047e5d8b4dc14c28f14068f2 namespace=moby
	Apr 08 18:14:13 functional-873100 dockerd[1457]: time="2025-04-08T18:14:13.207082925Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1451]: time="2025-04-08T18:14:18.018818225Z" level=info msg="ignoring event" container=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019424024Z" level=info msg="shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.019904223Z" level=warning msg="cleaning up after shim disconnected" id=c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924 namespace=moby
	Apr 08 18:14:18 functional-873100 dockerd[1457]: time="2025-04-08T18:14:18.020413522Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.902208957Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5
	Apr 08 18:14:22 functional-873100 dockerd[1451]: time="2025-04-08T18:14:22.948884062Z" level=info msg="ignoring event" container=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949176064Z" level=info msg="shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949669067Z" level=warning msg="cleaning up after shim disconnected" id=6cc156bfcb5ef4e7edd872434653800de6f014e6210889a7c72302b91ac0f4b5 namespace=moby
	Apr 08 18:14:22 functional-873100 dockerd[1457]: time="2025-04-08T18:14:22.949710067Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011786167Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011902768Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=moby
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.011929568Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:14:23 functional-873100 dockerd[1451]: time="2025-04-08T18:14:23.012461671Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:14:24 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:14:24 functional-873100 systemd[1]: docker.service: Consumed 5.669s CPU time.
	Apr 08 18:14:24 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.068343469Z" level=info msg="Starting up"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.069677076Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 18:14:24 functional-873100 dockerd[4359]: time="2025-04-08T18:14:24.070729582Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=4365
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.100696746Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126107485Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126196286Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126232186Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126245486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126268786Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126279486Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126454287Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126574788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126593088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126603888Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126634788Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.126741089Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129837506Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.129932306Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130405209Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130549410Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130614310Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130636510Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130875412Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130941212Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.130957112Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131033513Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131058113Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131119613Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131318314Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131609316Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131702516Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131719916Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131730916Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131741816Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131752716Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131764017Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131775117Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131793717Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131805917Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131816117Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131833817Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131846517Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131857117Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131868417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131878417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131889417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131905217Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131917717Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131928417Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131940317Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131951018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131961018Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.131970918Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132090118Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132112418Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132124718Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132134419Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132185119Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132206419Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132217619Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132228719Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132237919Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132248619Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132258319Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132570921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132663921Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132859123Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 18:14:24 functional-873100 dockerd[4365]: time="2025-04-08T18:14:24.132897823Z" level=info msg="containerd successfully booted in 0.033088s"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.116284488Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.164259928Z" level=info msg="Loading containers: start."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.397701599Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.522205623Z" level=info msg="Default bridge (docker0) is assigned with an IP address 172.17.0.0/16. Daemon option --bip can be used to set a preferred IP address"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.639097209Z" level=info msg="Loading containers: done."
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663639532Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.663772733Z" level=info msg="Daemon has completed initialization"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.702622627Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 18:14:25 functional-873100 dockerd[4359]: time="2025-04-08T18:14:25.703771933Z" level=info msg="API listen on [::]:2376"
	Apr 08 18:14:25 functional-873100 systemd[1]: Started Docker Application Container Engine.
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.332794698Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.336431912Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339499324Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.339700424Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.340027126Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346149349Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.346777551Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.348561258Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570602894Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570863695Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.570947695Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.571185596Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658587125Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658827926Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.658936527Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.659228328Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.703761195Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704154597Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704321797Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.704758199Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735296714Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735429615Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735452515Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.735781416Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.870831224Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871015625Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871100025Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:28 functional-873100 dockerd[4365]: time="2025-04-08T18:14:28.871268526Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041786953Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041857554Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.041871054Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.043550159Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.292833507Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293100408Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.293297408Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.294164111Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747736954Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747874254Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.747890754Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.759064792Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783055174Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783139474Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783155474Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.783261874Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.801445836Z" level=info msg="ignoring event" container=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802733841Z" level=info msg="shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.802966241Z" level=warning msg="cleaning up after shim disconnected" id=56f4b1d6d943304d9606304458b147772a2551f649b1f8ebab25c15cfce8a5a8 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.803082242Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.809900565Z" level=info msg="ignoring event" container=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810781768Z" level=info msg="shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.810912668Z" level=warning msg="cleaning up after shim disconnected" id=b1dfc0a6e15d581722c759690b8b9ec7497a32a969702730857f2f9ec78d72ec namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.811062969Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.825885519Z" level=info msg="ignoring event" container=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.827939026Z" level=info msg="ignoring event" container=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.828877629Z" level=info msg="shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829460931Z" level=warning msg="cleaning up after shim disconnected" id=2a0e2f30a1814068313aa682bd4607424d828e3b9fdfcedc52f4993b7aa7c71d namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829749532Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.829121630Z" level=info msg="shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.831057437Z" level=warning msg="cleaning up after shim disconnected" id=78e2e75be56900c060201a89193117a0f9da03d5816675be58d30bdd3db1e102 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.885542622Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.886211624Z" level=info msg="shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887807230Z" level=warning msg="cleaning up after shim disconnected" id=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.887863730Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.907464597Z" level=info msg="ignoring event" container=8181b4c4cd39bc9ef0f35812790d5e2f912f1cd4b3f3cede057028f923ba3dc4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.946953731Z" level=info msg="shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947152032Z" level=warning msg="cleaning up after shim disconnected" id=cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.947260432Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.958927872Z" level=info msg="shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.959093972Z" level=warning msg="cleaning up after shim disconnected" id=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.975088427Z" level=info msg="ignoring event" container=e2ed380437cf31b31f7ab757fafff818bed45b77c51b678911bf48d643517e2b module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.976303131Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4359]: time="2025-04-08T18:14:29.979795943Z" level=info msg="ignoring event" container=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.974915726Z" level=info msg="shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988113471Z" level=warning msg="cleaning up after shim disconnected" id=99a30b3bfae2dfebdb40d0e8db6bdd7dd7344a3d2d758756e4e3fc714fcc104a namespace=moby
	Apr 08 18:14:29 functional-873100 dockerd[4365]: time="2025-04-08T18:14:29.988215471Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.045577851Z" level=info msg="shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.046177453Z" level=warning msg="cleaning up after shim disconnected" id=9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91 namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.057572988Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.133079619Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to remove runc container\" error=\"runc did not terminate successfully: exit status 255: \" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.166032719Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.167119423Z" level=error msg="copy shim log" error="read /proc/self/fd/48: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175573749Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.175584449Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.203653735Z" level=warning msg="cleanup warnings time=\"2025-04-08T18:14:30Z\" level=warning msg=\"failed to read init pid file\" error=\"open /run/docker/containerd/daemon/io.containerd.runtime.v2.task/moby/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/init.pid: no such file or directory\" runtime=io.containerd.runc.v2\n" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.204855538Z" level=error msg="copy shim log" error="read /proc/self/fd/50: file already closed" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208656650Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.208769450Z" level=error msg="stream copy error: reading from a closed fifo"
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.341966357Z" level=error msg="collecting stats for container /k8s_storage-provisioner_storage-provisioner_kube-system_4be23f5b-6dcb-49ce-ac59-cfc1301fdbee_1: invalid id: "
	Apr 08 18:14:30 functional-873100 dockerd[4359]: 2025/04/08 18:14:30 http: superfluous response.WriteHeader call from go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp.(*respWriterWrapper).WriteHeader (wrap.go:98)
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.541261167Z" level=info msg="ignoring event" container=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542776071Z" level=info msg="shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542827172Z" level=warning msg="cleaning up after shim disconnected" id=cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.542837372Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4359]: time="2025-04-08T18:14:30.566194643Z" level=info msg="ignoring event" container=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566032643Z" level=info msg="shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.566907745Z" level=warning msg="cleaning up after shim disconnected" id=4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.567603047Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835197266Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835287766Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835327966Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.835459166Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911686499Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911805000Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.911955900Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:30 functional-873100 dockerd[4365]: time="2025-04-08T18:14:30.912386202Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101356747Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101551148Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101662048Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:31 functional-873100 dockerd[4365]: time="2025-04-08T18:14:31.101882249Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415314838Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415450438Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415603738Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.415976039Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474268465Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474348065Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474365065Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.474507465Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.482192682Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483364584Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.483571085Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.484600587Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526458677Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526763178Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526813978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:33 functional-873100 dockerd[4365]: time="2025-04-08T18:14:33.526935978Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133647850Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133723750Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133738750Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:34 functional-873100 dockerd[4365]: time="2025-04-08T18:14:34.133835850Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.286971612Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.287313112Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.290652915Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.294746619Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366454090Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366574390Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366587390Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.366700890Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396505520Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.396902520Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.397144620Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.399433222Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727525851Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727736952Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.727833252Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.728181052Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.850915184Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852051185Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852067185Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:38 functional-873100 dockerd[4365]: time="2025-04-08T18:14:38.852166785Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.064848638Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065394739Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.065518940Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:14:39 functional-873100 dockerd[4365]: time="2025-04-08T18:14:39.066853044Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:17:50 functional-873100 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.365807392Z" level=info msg="Processing signal 'terminated'"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568261688Z" level=info msg="shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568876587Z" level=warning msg="cleaning up after shim disconnected" id=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.568997887Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.589148847Z" level=info msg="ignoring event" container=24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623443078Z" level=info msg="shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623584378Z" level=warning msg="cleaning up after shim disconnected" id=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.623887877Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.625252675Z" level=info msg="ignoring event" container=608e72cc28ce1e88c48ff79993f6c83fe4196161a4df238c6bf8056b660932e8 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.627785270Z" level=info msg="ignoring event" container=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627830469Z" level=info msg="shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627921669Z" level=warning msg="cleaning up after shim disconnected" id=d6740eb8a0785d708135b617465542fb2d4f8c6f34b942fdd30149eeb64d4763 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.627934569Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.642668740Z" level=info msg="ignoring event" container=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644475036Z" level=info msg="shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644564436Z" level=warning msg="cleaning up after shim disconnected" id=9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.644577436Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.683973658Z" level=info msg="ignoring event" container=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686629852Z" level=info msg="shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686863652Z" level=warning msg="cleaning up after shim disconnected" id=3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.686935352Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.689544746Z" level=info msg="ignoring event" container=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691461743Z" level=info msg="shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691757842Z" level=warning msg="cleaning up after shim disconnected" id=8a7b8dd6d842bc65fe2a7e93ce1b3e1070ec13d069bb3b6af7d032095e64c10c namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.691850042Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698043329Z" level=info msg="shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.698224529Z" level=warning msg="cleaning up after shim disconnected" id=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.699562326Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701518623Z" level=info msg="shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701583722Z" level=warning msg="cleaning up after shim disconnected" id=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.701597022Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.704846016Z" level=info msg="shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706195213Z" level=info msg="ignoring event" container=8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706384813Z" level=info msg="ignoring event" container=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706434613Z" level=info msg="ignoring event" container=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.706453213Z" level=info msg="ignoring event" container=cb6fdef3ad360944bf76a489bc2aa657b627a58e516b874e913cebd80906fe20 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708810608Z" level=warning msg="cleaning up after shim disconnected" id=25fd4d5b12c367c2bbfe36fbadad6d9c26e400e1080f734d13d11f7f181a9dd6 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.708891008Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.706709612Z" level=info msg="shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711050604Z" level=warning msg="cleaning up after shim disconnected" id=2ad556aa57d9949c2d6684ba4739fce4bdb85b821795aae7eba3c022792bb2a4 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.711111803Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723156879Z" level=info msg="ignoring event" container=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4359]: time="2025-04-08T18:17:50.723218679Z" level=info msg="ignoring event" container=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728637968Z" level=info msg="shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728820868Z" level=warning msg="cleaning up after shim disconnected" id=12e5e01406f27eddbbaecaa5560181651a8bf74273dde3d9b4c09842d2999204 namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.728838268Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739136148Z" level=info msg="shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739219747Z" level=warning msg="cleaning up after shim disconnected" id=3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee namespace=moby
	Apr 08 18:17:50 functional-873100 dockerd[4365]: time="2025-04-08T18:17:50.739233947Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4359]: time="2025-04-08T18:17:55.602976948Z" level=info msg="ignoring event" container=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605413243Z" level=info msg="shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.605936142Z" level=warning msg="cleaning up after shim disconnected" id=fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718 namespace=moby
	Apr 08 18:17:55 functional-873100 dockerd[4365]: time="2025-04-08T18:17:55.606055942Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.606652814Z" level=info msg="Container failed to exit within 10s of signal 15 - using the force" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.663136604Z" level=info msg="ignoring event" container=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e module=libcontainerd namespace=moby topic=/tasks/delete type="*events.TaskDelete"
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663487579Z" level=info msg="shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663555775Z" level=warning msg="cleaning up after shim disconnected" id=d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4365]: time="2025-04-08T18:18:00.663584972Z" level=info msg="cleaning up dead shim" namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733154233Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733206530Z" level=info msg="Daemon shutdown complete"
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733487510Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 18:18:00 functional-873100 dockerd[4359]: time="2025-04-08T18:18:00.733578803Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 18:18:01 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:18:01 functional-873100 systemd[1]: docker.service: Consumed 9.706s CPU time.
	Apr 08 18:18:01 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 18:18:01 functional-873100 dockerd[8415]: time="2025-04-08T18:18:01.793220155Z" level=info msg="Starting up"
	Apr 08 18:19:01 functional-873100 dockerd[8415]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 18:19:01 functional-873100 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 18:19:01 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 18:19:01.925950    6216 out.go:270] * 
	W0408 18:19:01.927406    6216 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 18:19:01.932212    6216 out.go:201] 
	
	
	==> Docker <==
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error getting RW layer size for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="Set backoffDuration to : 1m0s for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d'"
	Apr 08 18:23:02 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:23:02Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:23:02 functional-873100 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 18:23:02 functional-873100 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 18:23:02 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:23:03Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = Unknown desc = failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[  +5.449904] systemd-fstab-generator[2412]: Ignoring "noauto" option for root device
	[  +0.203006] kauditd_printk_skb: 12 callbacks suppressed
	[  +8.232409] kauditd_printk_skb: 88 callbacks suppressed
	[ +28.267561] kauditd_printk_skb: 10 callbacks suppressed
	[Apr 8 18:14] systemd-fstab-generator[3877]: Ignoring "noauto" option for root device
	[  +0.672076] systemd-fstab-generator[3911]: Ignoring "noauto" option for root device
	[  +0.280795] systemd-fstab-generator[3923]: Ignoring "noauto" option for root device
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	
	
	==> kernel <==
	 18:24:03 up 14 min,  0 users,  load average: 0.00, 0.06, 0.08
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124454    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124541    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124454    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124682    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124796    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.125105    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.124663    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.125259    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.125357    5867 log.go:32] "Version from runtime service failed" err="rpc error: code = Unknown desc = failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: I0408 18:24:03.125426    5867 setters.go:602] "Node became not ready" node="functional-873100" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-04-08T18:24:03Z","lastTransitionTime":"2025-04-08T18:24:03Z","reason":"KubeletNotReady","message":"[container runtime is down, PLEG is not healthy: pleg was last seen active 6m13.605919263s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @-\u003e/var/run/docker.sock: read: connection reset by peer]"}
	Apr 08 18:24:03 functional-873100 kubelet[5867]: I0408 18:24:03.124816    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.126183    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.126423    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.126600    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.127041    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.127582    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.127984    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.125436    5867 kubelet.go:3018] "Container runtime not ready" runtimeReady="RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.131656    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:24:03Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:24:03Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:24:03Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:24:03Z\\\",\\\"lastTransitionTime\\\":\\\"2025-04-08T18:24:03Z\\\",\\\"message\\\":\\\"[container runtime is down, PLEG is not healthy: pleg was last seen active 6m13.605919263s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to g
et docker version: failed to get docker version from dockerd: error during connect: Get \\\\\\\"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\\\\\\\": read unix @-\\\\u003e/var/run/docker.sock: read: connection reset by peer]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"containerRuntimeVersion\\\":\\\"docker://Unknown\\\"}}}\" for node \"functional-873100\": Patch \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100/status?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.133068    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.134859    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.135946    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.137105    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.137198    5867 kubelet_node_status.go:536] "Unable to update node status" err="update node status exceeds retry count"
	Apr 08 18:24:03 functional-873100 kubelet[5867]: E0408 18:24:03.193528    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 6m13.673984561s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/var/run/docker.sock: read: connection reset by peer]"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:22:02.642686    3116 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.676494    3116 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.708832    3116 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.736848    3116 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.768952    3116 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.804287    3116 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:22:02.840603    3116 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	permission denied while trying to connect to the Docker daemon socket at unix:///var/run/docker.sock: Get "http://%2Fvar%2Frun%2Fdocker.sock/v1.47/containers/json?all=1&filters=%7B%22name%22%3A%7B%22k8s_kindnet%22%3Atrue%7D%7D": dial unix /var/run/docker.sock: connect: permission denied
	E0408 18:23:02.913453    3116 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.2173017s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/serial/ComponentHealth (180.66s)

                                                
                                    
x
+
TestFunctional/serial/InvalidService (4.24s)

                                                
                                                
=== RUN   TestFunctional/serial/InvalidService
functional_test.go:2338: (dbg) Run:  kubectl --context functional-873100 apply -f testdata\invalidsvc.yaml
functional_test.go:2338: (dbg) Non-zero exit: kubectl --context functional-873100 apply -f testdata\invalidsvc.yaml: exit status 1 (4.2295934s)

                                                
                                                
** stderr ** 
	error: error validating "testdata\\invalidsvc.yaml": error validating data: failed to download openapi: Get "https://172.22.46.213:8441/openapi/v2?timeout=32s": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.; if you choose to ignore these errors, turn validation off with --validate=false

                                                
                                                
** /stderr **
functional_test.go:2340: kubectl --context functional-873100 apply -f testdata\invalidsvc.yaml failed: exit status 1
--- FAIL: TestFunctional/serial/InvalidService (4.24s)

                                                
                                    
x
+
TestFunctional/parallel/StatusCmd (249.31s)

                                                
                                                
=== RUN   TestFunctional/parallel/StatusCmd
=== PAUSE TestFunctional/parallel/StatusCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/StatusCmd
functional_test.go:871: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 status
functional_test.go:871: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 status: exit status 2 (14.6727003s)

                                                
                                                
-- stdout --
	functional-873100
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Stopped
	kubeconfig: Configured
	

                                                
                                                
-- /stdout --
functional_test.go:873: failed to run minikube status. args "out/minikube-windows-amd64.exe -p functional-873100 status" : exit status 2
functional_test.go:877: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}
functional_test.go:877: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}: exit status 2 (14.2650192s)

                                                
                                                
-- stdout --
	host:Running,kublet:Running,apiserver:Stopped,kubeconfig:Configured

                                                
                                                
-- /stdout --
functional_test.go:879: failed to run minikube status with custom format: args "out/minikube-windows-amd64.exe -p functional-873100 status -f host:{{.Host}},kublet:{{.Kubelet}},apiserver:{{.APIServer}},kubeconfig:{{.Kubeconfig}}": exit status 2
functional_test.go:889: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 status -o json
functional_test.go:889: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 status -o json: exit status 2 (14.099445s)

                                                
                                                
-- stdout --
	{"Name":"functional-873100","Host":"Running","Kubelet":"Running","APIServer":"Stopped","Kubeconfig":"Configured","Worker":false}

                                                
                                                
-- /stdout --
functional_test.go:891: failed to run minikube status with json output. args "out/minikube-windows-amd64.exe -p functional-873100 status -o json" : exit status 2
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (13.8746217s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/parallel/StatusCmd FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/StatusCmd]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (2m59.9843217s)
helpers_test.go:252: TestFunctional/parallel/StatusCmd logs: 
-- stdout --
	
	==> Audit <==
	|---------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| Command |                                                Args                                                 |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| cache   | functional-873100 cache reload                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	| ssh     | functional-873100 ssh                                                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | sudo crictl inspecti                                                                                |                   |                   |         |                     |                     |
	|         | registry.k8s.io/pause:latest                                                                        |                   |                   |         |                     |                     |
	| cache   | delete                                                                                              | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:3.1                                                                           |                   |                   |         |                     |                     |
	| cache   | delete                                                                                              | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | registry.k8s.io/pause:latest                                                                        |                   |                   |         |                     |                     |
	| kubectl | functional-873100 kubectl --                                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC | 08 Apr 25 18:16 UTC |
	|         | --context functional-873100                                                                         |                   |                   |         |                     |                     |
	|         | get pods                                                                                            |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:16 UTC |                     |
	|         | --extra-config=apiserver.enable-admission-plugins=NamespaceAutoProvision                            |                   |                   |         |                     |                     |
	|         | --wait=all                                                                                          |                   |                   |         |                     |                     |
	| config  | functional-873100 config unset                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | cpus                                                                                                |                   |                   |         |                     |                     |
	| cp      | functional-873100 cp                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | testdata\cp-test.txt                                                                                |                   |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| config  | functional-873100 config get                                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | cpus                                                                                                |                   |                   |         |                     |                     |
	| config  | functional-873100 config set                                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | cpus 2                                                                                              |                   |                   |         |                     |                     |
	| config  | functional-873100 config get                                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | cpus                                                                                                |                   |                   |         |                     |                     |
	| config  | functional-873100 config unset                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | cpus                                                                                                |                   |                   |         |                     |                     |
	| config  | functional-873100 config get                                                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | cpus                                                                                                |                   |                   |         |                     |                     |
	| service | functional-873100 service list                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	| ssh     | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| service | functional-873100 service list                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | -o json                                                                                             |                   |                   |         |                     |                     |
	| service | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | --namespace=default --https                                                                         |                   |                   |         |                     |                     |
	|         | --url hello-node                                                                                    |                   |                   |         |                     |                     |
	| cp      | functional-873100 cp functional-873100:/home/docker/cp-test.txt                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalparallelCpCmd1674743440\001\cp-test.txt |                   |                   |         |                     |                     |
	| service | functional-873100                                                                                   | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | service hello-node --url                                                                            |                   |                   |         |                     |                     |
	|         | --format={{.IP}}                                                                                    |                   |                   |         |                     |                     |
	| ssh     | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|         | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| service | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | hello-node --url                                                                                    |                   |                   |         |                     |                     |
	| cp      | functional-873100 cp                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:31 UTC |
	|         | testdata\cp-test.txt                                                                                |                   |                   |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|         | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|         | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|         | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	| ssh     | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|         | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|         | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start   | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|         | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|         | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|         | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	|---------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:31:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	
	==> Docker <==
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:33:05 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:33:05Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:33:05 functional-873100 systemd[1]: docker.service: Scheduled restart job, restart counter is at 15.
	Apr 08 18:33:05 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:33:05 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:33:07Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[  +5.449904] systemd-fstab-generator[2412]: Ignoring "noauto" option for root device
	[  +0.203006] kauditd_printk_skb: 12 callbacks suppressed
	[  +8.232409] kauditd_printk_skb: 88 callbacks suppressed
	[ +28.267561] kauditd_printk_skb: 10 callbacks suppressed
	[Apr 8 18:14] systemd-fstab-generator[3877]: Ignoring "noauto" option for root device
	[  +0.672076] systemd-fstab-generator[3911]: Ignoring "noauto" option for root device
	[  +0.280795] systemd-fstab-generator[3923]: Ignoring "noauto" option for root device
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	
	
	==> kernel <==
	 18:34:05 up 24 min,  0 users,  load average: 0.07, 0.06, 0.06
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.734160    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.734837    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735618    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735674    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735710    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735751    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735985    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.736055    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.736073    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.736148    5867 kubelet.go:3018] "Container runtime not ready" runtimeReady="RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.736210    5867 log.go:32] "Version from runtime service failed" err="rpc error: code = Unknown desc = failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: I0408 18:34:05.736385    5867 setters.go:602] "Node became not ready" node="functional-873100" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-04-08T18:34:05Z","lastTransitionTime":"2025-04-08T18:34:05Z","reason":"KubeletNotReady","message":"[container runtime is down, PLEG is not healthy: pleg was last seen active 16m16.216782449s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"}
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.735367    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.737617    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: I0408 18:34:05.738534    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.739464    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.741678    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.743746    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.744106    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:34:05Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:34:05Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:34:05Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:34:05Z\\\",\\\"lastTransitionTime\\\":\\\"2025-04-08T18:34:05Z\\\",\\\"message\\\":\\\"[container runtime is down, PLEG is not healthy: pleg was last seen active 16m16.216782449s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to
get docker version: failed to get docker version from dockerd: Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"containerRuntimeVersion\\\":\\\"docker://Unknown\\\"}}}\" for node \"functional-873100\": Patch \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100/status?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.747833    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.749678    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.750802    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.751870    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.751956    5867 kubelet_node_status.go:536] "Unable to update node status" err="update node status exceeds retry count"
	Apr 08 18:34:05 functional-873100 kubelet[5867]: E0408 18:34:05.862816    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/etcd-functional-873100.18346ab29d6d446e\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{etcd-functional-873100.18346ab29d6d446e  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:etcd-functional-873100,UID:3e3dca81cfb79388693d0fc85f2bc1ca,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Unhealthy,Message:Readiness probe failed: Get \"http://127.0.0.1:2381/readyz\": dial tcp 127.0.0.1:2381: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:51.077131374 +0000 UTC m=+198.570771328,LastTimestamp:2025-04-08 18:17:53.078344483 +0000 UTC m=+200.
571984437,Count:4,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:32:05.193583    9672 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:32:05.242950    9672 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:32:05.276574    9672 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:32:05.306011    9672 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:33:05.436352    9672 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:33:05.478975    9672 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:33:05.534194    9672 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:33:05.581176    9672 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.3959094s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/parallel/StatusCmd (249.31s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmdConnect (181.19s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmdConnect
=== PAUSE TestFunctional/parallel/ServiceCmdConnect

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ServiceCmdConnect
functional_test.go:1646: (dbg) Run:  kubectl --context functional-873100 create deployment hello-node-connect --image=registry.k8s.io/echoserver:1.8
functional_test.go:1646: (dbg) Non-zero exit: kubectl --context functional-873100 create deployment hello-node-connect --image=registry.k8s.io/echoserver:1.8: exit status 1 (2.1632165s)

                                                
                                                
** stderr ** 
	error: failed to create deployment: Post "https://172.22.46.213:8441/apis/apps/v1/namespaces/default/deployments?fieldManager=kubectl-create&fieldValidation=Strict": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1650: failed to create hello-node deployment with this command "kubectl --context functional-873100 create deployment hello-node-connect --image=registry.k8s.io/echoserver:1.8": exit status 1.
functional_test.go:1615: service test failed - dumping debug information
functional_test.go:1616: -----------------------service failure post-mortem--------------------------------
functional_test.go:1619: (dbg) Run:  kubectl --context functional-873100 describe po hello-node-connect
functional_test.go:1619: (dbg) Non-zero exit: kubectl --context functional-873100 describe po hello-node-connect: exit status 1 (2.2111968s)

                                                
                                                
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1621: "kubectl --context functional-873100 describe po hello-node-connect" failed: exit status 1
functional_test.go:1623: hello-node pod describe:
functional_test.go:1625: (dbg) Run:  kubectl --context functional-873100 logs -l app=hello-node-connect
functional_test.go:1625: (dbg) Non-zero exit: kubectl --context functional-873100 logs -l app=hello-node-connect: exit status 1 (2.1837673s)

                                                
                                                
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1627: "kubectl --context functional-873100 logs -l app=hello-node-connect" failed: exit status 1
functional_test.go:1629: hello-node logs:
functional_test.go:1631: (dbg) Run:  kubectl --context functional-873100 describe svc hello-node-connect
functional_test.go:1631: (dbg) Non-zero exit: kubectl --context functional-873100 describe svc hello-node-connect: exit status 1 (2.161545s)

                                                
                                                
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1633: "kubectl --context functional-873100 describe svc hello-node-connect" failed: exit status 1
functional_test.go:1635: hello-node svc describe:
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.0290511s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/parallel/ServiceCmdConnect FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/ServiceCmdConnect]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
E0408 18:39:55.308738    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (2m27.7029861s)
helpers_test.go:252: TestFunctional/parallel/ServiceCmdConnect logs: 
-- stdout --
	
	==> Audit <==
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	|  Command   |                                 Args                                  |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| ssh        | functional-873100 ssh echo                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|            | hello                                                                 |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh cat                                             | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|            | /etc/hostname                                                         |                   |                   |         |                     |                     |
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| addons     | functional-873100 addons list                                         | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| addons     | functional-873100 addons list                                         | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|            | -o json                                                               |                   |                   |         |                     |                     |
	| dashboard  | --url --port 36195                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | -p functional-873100                                                  |                   |                   |         |                     |                     |
	|            | --alsologtostderr -v=1                                                |                   |                   |         |                     |                     |
	| license    |                                                                       | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| ssh        | functional-873100 ssh sudo                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | systemctl is-active crio                                              |                   |                   |         |                     |                     |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:32 UTC | 08 Apr 25 18:33 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:33 UTC | 08 Apr 25 18:34 UTC |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:34 UTC | 08 Apr 25 18:35 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:36 UTC |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/7904.pem                                               |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /usr/share/ca-certificates/7904.pem                                   |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/51391683.0                                             |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/79042.pem                                              |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:36 UTC |
	|            | /usr/share/ca-certificates/79042.pem                                  |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC | 08 Apr 25 18:36 UTC |
	|            | /etc/ssl/certs/3ec20f2e.0                                             |                   |                   |         |                     |                     |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC | 08 Apr 25 18:37 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| docker-env | functional-873100 docker-env                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:37 UTC | 08 Apr 25 18:38 UTC |
	| image      | functional-873100 image save kicbase/echo-server:functional-873100    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:38 UTC | 08 Apr 25 18:39 UTC |
	|            | C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image rm                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:39 UTC |                     |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:31:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	
	==> Docker <==
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:41:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:41:07 functional-873100 systemd[1]: docker.service: Scheduled restart job, restart counter is at 3.
	Apr 08 18:41:07 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:41:07 functional-873100 cri-dockerd[4661]: W0408 18:41:07.664212    4661 logging.go:59] [core] [Server #1] grpc: Server.processUnaryRPC failed to write status: connection error: desc = "transport is closing"
	Apr 08 18:41:07 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:41:09Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	[Apr 8 18:36] systemd-fstab-generator[14031]: Ignoring "noauto" option for root device
	[Apr 8 18:37] systemd-fstab-generator[14218]: Ignoring "noauto" option for root device
	[  +0.130934] kauditd_printk_skb: 12 callbacks suppressed
	[Apr 8 18:41] systemd-fstab-generator[15444]: Ignoring "noauto" option for root device
	[  +0.178916] kauditd_printk_skb: 12 callbacks suppressed
	[Apr 8 18:42] systemd-fstab-generator[15781]: Ignoring "noauto" option for root device
	[  +0.178344] kauditd_printk_skb: 12 callbacks suppressed
	
	
	==> kernel <==
	 18:42:08 up 32 min,  0 users,  load average: 0.02, 0.04, 0.04
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:41:53 functional-873100 kubelet[5867]: E0408 18:41:53.395849    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 24m3.876337145s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:41:54 functional-873100 kubelet[5867]: E0408 18:41:54.769235    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:41:58 functional-873100 kubelet[5867]: E0408 18:41:58.396263    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 24m8.876749161s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:42:01 functional-873100 kubelet[5867]: E0408 18:42:01.771278    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:42:02 functional-873100 kubelet[5867]: E0408 18:42:02.649395    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/kube-scheduler-functional-873100.18346ab28d526cec\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{kube-scheduler-functional-873100.18346ab28d526cec  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:kube-scheduler-functional-873100,UID:807b787af7a083ae3ea608e19aade7a5,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Unhealthy,Message:Readiness probe failed: Get \"https://127.0.0.1:10259/readyz\": dial tcp 127.0.0.1:10259: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:50.806936812 +0000 UTC m=+198.300576766,LastTimestamp:2025
-04-08 18:17:54.80981393 +0000 UTC m=+202.303453884,Count:6,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	Apr 08 18:42:02 functional-873100 kubelet[5867]: I0408 18:42:02.720662    5867 status_manager.go:890] "Failed to get status for pod" podUID="d64fc418bb9f3cd367047ed11adace28" pod="kube-system/kube-apiserver-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:42:02 functional-873100 kubelet[5867]: I0408 18:42:02.721811    5867 status_manager.go:890] "Failed to get status for pod" podUID="3e3dca81cfb79388693d0fc85f2bc1ca" pod="kube-system/etcd-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:42:02 functional-873100 kubelet[5867]: I0408 18:42:02.723022    5867 status_manager.go:890] "Failed to get status for pod" podUID="807b787af7a083ae3ea608e19aade7a5" pod="kube-system/kube-scheduler-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:42:03 functional-873100 kubelet[5867]: E0408 18:42:03.396989    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 24m13.877460756s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.785764    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786206    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786724    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786765    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786789    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786810    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: I0408 18:42:07.786824    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.785614    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.786941    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.787558    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.787591    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.787607    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.794865    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.794912    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.795573    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"
	Apr 08 18:42:07 functional-873100 kubelet[5867]: E0408 18:42:07.888234    5867 kubelet.go:3018] "Container runtime not ready" runtimeReady="RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:40:07.034919   13056 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:40:07.073023   13056 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:40:07.117017   13056 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:40:07.149637   13056 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:40:07.185631   13056 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.334237   13056 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.413598   13056 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.485529   13056 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.6783141s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/parallel/ServiceCmdConnect (181.19s)

                                                
                                    
x
+
TestFunctional/parallel/PersistentVolumeClaim (554.22s)

                                                
                                                
=== RUN   TestFunctional/parallel/PersistentVolumeClaim
=== PAUSE TestFunctional/parallel/PersistentVolumeClaim

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/PersistentVolumeClaim
functional_test_pvc_test.go:44: (dbg) TestFunctional/parallel/PersistentVolumeClaim: waiting 4m0s for pods matching "integration-test=storage-provisioner" in namespace "kube-system" ...
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
E0408 18:31:52.225178    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
helpers_test.go:329: TestFunctional/parallel/PersistentVolumeClaim: WARNING: pod list for "kube-system" "integration-test=storage-provisioner" returned: Get "https://172.22.46.213:8441/api/v1/namespaces/kube-system/pods?labelSelector=integration-test%3Dstorage-provisioner": context deadline exceeded
functional_test_pvc_test.go:44: ***** TestFunctional/parallel/PersistentVolumeClaim: pod "integration-test=storage-provisioner" failed to start within 4m0s: context deadline exceeded ****
functional_test_pvc_test.go:44: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
functional_test_pvc_test.go:44: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.8918736s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
functional_test_pvc_test.go:44: status error: exit status 2 (may be ok)
functional_test_pvc_test.go:44: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
functional_test_pvc_test.go:45: failed waiting for storage-provisioner: integration-test=storage-provisioner within 4m0s: context deadline exceeded
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.5990932s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/parallel/PersistentVolumeClaim FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/PersistentVolumeClaim]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (4m36.4213278s)
helpers_test.go:252: TestFunctional/parallel/PersistentVolumeClaim logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	|  Command  |                                                Args                                                 |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| service   | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | --namespace=default --https                                                                         |                   |                   |         |                     |                     |
	|           | --url hello-node                                                                                    |                   |                   |         |                     |                     |
	| cp        | functional-873100 cp functional-873100:/home/docker/cp-test.txt                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|           | C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalparallelCpCmd1674743440\001\cp-test.txt |                   |                   |         |                     |                     |
	| service   | functional-873100                                                                                   | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | service hello-node --url                                                                            |                   |                   |         |                     |                     |
	|           | --format={{.IP}}                                                                                    |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|           | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|           | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| service   | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | hello-node --url                                                                                    |                   |                   |         |                     |                     |
	| cp        | functional-873100 cp                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:31 UTC |
	|           | testdata\cp-test.txt                                                                                |                   |                   |         |                     |                     |
	|           | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start     | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|           | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|           | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|           | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start     | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|           | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|           | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh echo                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | hello                                                                                               |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh cat                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | /etc/hostname                                                                                       |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| addons    | functional-873100 addons list                                                                       | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| addons    | functional-873100 addons list                                                                       | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | -o json                                                                                             |                   |                   |         |                     |                     |
	| dashboard | --url --port 36195                                                                                  | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | -p functional-873100                                                                                |                   |                   |         |                     |                     |
	|           | --alsologtostderr -v=1                                                                              |                   |                   |         |                     |                     |
	| license   |                                                                                                     | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| ssh       | functional-873100 ssh sudo                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | systemctl is-active crio                                                                            |                   |                   |         |                     |                     |
	| image     | functional-873100 image load --daemon                                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:32 UTC | 08 Apr 25 18:33 UTC |
	|           | kicbase/echo-server:functional-873100                                                               |                   |                   |         |                     |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| image     | functional-873100 image ls                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:33 UTC | 08 Apr 25 18:34 UTC |
	| image     | functional-873100 image load --daemon                                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:34 UTC | 08 Apr 25 18:35 UTC |
	|           | kicbase/echo-server:functional-873100                                                               |                   |                   |         |                     |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| image     | functional-873100 image ls                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC |                     |
	| ssh       | functional-873100 ssh sudo cat                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|           | /etc/ssl/certs/7904.pem                                                                             |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh sudo cat                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC |                     |
	|           | /usr/share/ca-certificates/7904.pem                                                                 |                   |                   |         |                     |                     |
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:31:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	
	==> Docker <==
	Apr 08 18:39:06 functional-873100 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e'"
	Apr 08 18:39:06 functional-873100 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:39:06 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924'"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="error getting RW layer size for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:39:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d'"
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:39:08Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[ +28.267561] kauditd_printk_skb: 10 callbacks suppressed
	[Apr 8 18:14] systemd-fstab-generator[3877]: Ignoring "noauto" option for root device
	[  +0.672076] systemd-fstab-generator[3911]: Ignoring "noauto" option for root device
	[  +0.280795] systemd-fstab-generator[3923]: Ignoring "noauto" option for root device
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	[Apr 8 18:36] systemd-fstab-generator[14031]: Ignoring "noauto" option for root device
	[Apr 8 18:37] systemd-fstab-generator[14218]: Ignoring "noauto" option for root device
	[  +0.130934] kauditd_printk_skb: 12 callbacks suppressed
	
	
	==> kernel <==
	 18:40:07 up 30 min,  0 users,  load average: 0.19, 0.06, 0.05
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.022058    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.022111    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028046    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028085    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028149    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028282    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028371    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028427    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: I0408 18:40:07.028441    5867 image_gc_manager.go:214] "Failed to monitor images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028550    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028572    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: I0408 18:40:07.028599    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028617    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.028636    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.030263    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.030577    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.033058    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?]"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.202985    5867 log.go:32] "Version from runtime service failed" err="rpc error: code = Unknown desc = failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: I0408 18:40:07.203104    5867 setters.go:602] "Node became not ready" node="functional-873100" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-04-08T18:40:07Z","lastTransitionTime":"2025-04-08T18:40:07Z","reason":"KubeletNotReady","message":"[container runtime is down, PLEG is not healthy: pleg was last seen active 22m17.683585815s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @-\u003e/run/docker.sock: read: connection reset by peer]"}
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.206114    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:40:07Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:40:07Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:40:07Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:40:07Z\\\",\\\"lastTransitionTime\\\":\\\"2025-04-08T18:40:07Z\\\",\\\"message\\\":\\\"[container runtime is down, PLEG is not healthy: pleg was last seen active 22m17.683585815s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to
get docker version: failed to get docker version from dockerd: error during connect: Get \\\\\\\"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\\\\\\\": read unix @-\\\\u003e/run/docker.sock: read: connection reset by peer]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"containerRuntimeVersion\\\":\\\"docker://Unknown\\\"}}}\" for node \"functional-873100\": Patch \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100/status?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.207562    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.208675    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.209502    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.212041    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:40:07 functional-873100 kubelet[5867]: E0408 18:40:07.212080    5867 kubelet_node_status.go:536] "Unable to update node status" err="update node status exceeds retry count"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:36:06.222265   10848 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:36:06.264999   10848 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:36:06.323577   10848 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:37:06.436642   10848 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:37:06.494036   10848 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:38:06.670443   10848 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:39:06.784332   10848 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:39:06.829658   10848 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.2896258s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/parallel/PersistentVolumeClaim (554.22s)

                                                
                                    
x
+
TestFunctional/parallel/MySQL (172.41s)

                                                
                                                
=== RUN   TestFunctional/parallel/MySQL
=== PAUSE TestFunctional/parallel/MySQL

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/MySQL
functional_test.go:1810: (dbg) Run:  kubectl --context functional-873100 replace --force -f testdata\mysql.yaml
functional_test.go:1810: (dbg) Non-zero exit: kubectl --context functional-873100 replace --force -f testdata\mysql.yaml: exit status 1 (4.2351488s)

                                                
                                                
** stderr ** 
	error when deleting "testdata\\mysql.yaml": Delete "https://172.22.46.213:8441/api/v1/namespaces/default/services/mysql": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error when deleting "testdata\\mysql.yaml": Delete "https://172.22.46.213:8441/apis/apps/v1/namespaces/default/deployments/mysql": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1812: failed to kubectl replace mysql: args "kubectl --context functional-873100 replace --force -f testdata\\mysql.yaml" failed: exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.1048316s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/parallel/MySQL FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/MySQL]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (2m23.0149458s)
helpers_test.go:252: TestFunctional/parallel/MySQL logs: 
-- stdout --
	
	==> Audit <==
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	|  Command   |                                 Args                                  |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| tunnel     | functional-873100 tunnel                                              | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| addons     | functional-873100 addons list                                         | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| addons     | functional-873100 addons list                                         | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|            | -o json                                                               |                   |                   |         |                     |                     |
	| dashboard  | --url --port 36195                                                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | -p functional-873100                                                  |                   |                   |         |                     |                     |
	|            | --alsologtostderr -v=1                                                |                   |                   |         |                     |                     |
	| license    |                                                                       | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| ssh        | functional-873100 ssh sudo                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|            | systemctl is-active crio                                              |                   |                   |         |                     |                     |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:32 UTC | 08 Apr 25 18:33 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:33 UTC | 08 Apr 25 18:34 UTC |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:34 UTC | 08 Apr 25 18:35 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:36 UTC |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/7904.pem                                               |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /usr/share/ca-certificates/7904.pem                                   |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/51391683.0                                             |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:35 UTC |
	|            | /etc/ssl/certs/79042.pem                                              |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:35 UTC | 08 Apr 25 18:36 UTC |
	|            | /usr/share/ca-certificates/79042.pem                                  |                   |                   |         |                     |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC | 08 Apr 25 18:36 UTC |
	|            | /etc/ssl/certs/3ec20f2e.0                                             |                   |                   |         |                     |                     |
	| image      | functional-873100 image load --daemon                                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC | 08 Apr 25 18:37 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| docker-env | functional-873100 docker-env                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:36 UTC |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:37 UTC | 08 Apr 25 18:38 UTC |
	| image      | functional-873100 image save kicbase/echo-server:functional-873100    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:38 UTC | 08 Apr 25 18:39 UTC |
	|            | C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image rm                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:39 UTC | 08 Apr 25 18:40 UTC |
	|            | kicbase/echo-server:functional-873100                                 |                   |                   |         |                     |                     |
	|            | --alsologtostderr                                                     |                   |                   |         |                     |                     |
	| image      | functional-873100 image ls                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:40 UTC |                     |
	| ssh        | functional-873100 ssh sudo cat                                        | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:40 UTC | 08 Apr 25 18:40 UTC |
	|            | /etc/test/nested/copy/7904/hosts                                      |                   |                   |         |                     |                     |
	|------------|-----------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:31:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	
	==> Docker <==
	Apr 08 18:42:07 functional-873100 systemd[1]: Failed to start Docker Application Container Engine.
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cd74820d98d9a3d3bb3320c06c1419b70b9548eaa07a3d4733c4e1d70d42e2cd'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/d5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'd5fc4325314ec83942790a9bfb250fbc3410cf1fdf32c0e15b7853412095a46e'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:42:07 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:42:07Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:42:07 functional-873100 systemd[1]: docker.service: Scheduled restart job, restart counter is at 4.
	Apr 08 18:42:07 functional-873100 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 18:42:07 functional-873100 systemd[1]: Starting Docker Application Container Engine...
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:42:10Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	[Apr 8 18:36] systemd-fstab-generator[14031]: Ignoring "noauto" option for root device
	[Apr 8 18:37] systemd-fstab-generator[14218]: Ignoring "noauto" option for root device
	[  +0.130934] kauditd_printk_skb: 12 callbacks suppressed
	[Apr 8 18:41] systemd-fstab-generator[15444]: Ignoring "noauto" option for root device
	[  +0.178916] kauditd_printk_skb: 12 callbacks suppressed
	[Apr 8 18:42] systemd-fstab-generator[15781]: Ignoring "noauto" option for root device
	[  +0.178344] kauditd_printk_skb: 12 callbacks suppressed
	
	
	==> kernel <==
	 18:43:08 up 33 min,  0 users,  load average: 0.01, 0.03, 0.03
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:43:02 functional-873100 kubelet[5867]: I0408 18:43:02.719908    5867 status_manager.go:890] "Failed to get status for pod" podUID="807b787af7a083ae3ea608e19aade7a5" pod="kube-system/kube-scheduler-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:02 functional-873100 kubelet[5867]: I0408 18:43:02.721438    5867 status_manager.go:890] "Failed to get status for pod" podUID="d64fc418bb9f3cd367047ed11adace28" pod="kube-system/kube-apiserver-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:03 functional-873100 kubelet[5867]: E0408 18:43:03.409589    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 25m13.890062339s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:43:04 functional-873100 kubelet[5867]: E0408 18:43:04.794258    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.105980    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.106172    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.106198    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122244    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122362    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122506    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122540    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: I0408 18:43:08.122554    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122578    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122601    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.122817    5867 log.go:32] "Version from runtime service failed" err="rpc error: code = Unknown desc = failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: I0408 18:43:08.123693    5867 setters.go:602] "Node became not ready" node="functional-873100" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-04-08T18:43:08Z","lastTransitionTime":"2025-04-08T18:43:08Z","reason":"KubeletNotReady","message":"[container runtime is down, PLEG is not healthy: pleg was last seen active 25m18.604050031s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @-\u003e/run/docker.sock: read: connection reset by peer]"}
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.137271    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.137584    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.140046    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:43:08Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:43:08Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:43:08Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-04-08T18:43:08Z\\\",\\\"lastTransitionTime\\\":\\\"2025-04-08T18:43:08Z\\\",\\\"message\\\":\\\"[container runtime is down, PLEG is not healthy: pleg was last seen active 25m18.604050031s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to
get docker version: failed to get docker version from dockerd: error during connect: Get \\\\\\\"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\\\\\\\": read unix @-\\\\u003e/run/docker.sock: read: connection reset by peer]\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"nodeInfo\\\":{\\\"containerRuntimeVersion\\\":\\\"docker://Unknown\\\"}}}\" for node \"functional-873100\": Patch \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100/status?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.140898    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.142984    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.145163    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.146090    5867 kubelet_node_status.go:549] "Error updating node status, will retry" err="error getting node \"functional-873100\": Get \"https://control-plane.minikube.internal:8441/api/v1/nodes/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.146160    5867 kubelet_node_status.go:536] "Unable to update node status" err="update node status exceeds retry count"
	Apr 08 18:43:08 functional-873100 kubelet[5867]: E0408 18:43:08.321887    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/etcd-functional-873100.18346ab29d6d446e\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{etcd-functional-873100.18346ab29d6d446e  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:etcd-functional-873100,UID:3e3dca81cfb79388693d0fc85f2bc1ca,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Unhealthy,Message:Readiness probe failed: Get \"http://127.0.0.1:2381/readyz\": dial tcp 127.0.0.1:2381: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:51.077131374 +0000 UTC m=+198.570771328,LastTimestamp:2025-04-08 18:17:55.078863793 +0000 UTC m=+202.
572503847,Count:6,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:41:07.334237    7568 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.438927    7568 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.503100    7568 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.547428    7568 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.580338    7568 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:41:07.608367    7568 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:42:07.766858    7568 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:42:07.850817    7568 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (13.0343809s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/parallel/MySQL (172.41s)

                                                
                                    
x
+
TestFunctional/parallel/NodeLabels (301.41s)

                                                
                                                
=== RUN   TestFunctional/parallel/NodeLabels
=== PAUSE TestFunctional/parallel/NodeLabels

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/NodeLabels
functional_test.go:236: (dbg) Run:  kubectl --context functional-873100 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'"
functional_test.go:236: (dbg) Non-zero exit: kubectl --context functional-873100 get nodes --output=go-template "--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": exit status 1 (2.1818095s)

                                                
                                                
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
functional_test.go:238: failed to 'kubectl get nodes' with args "kubectl --context functional-873100 get nodes --output=go-template \"--template='{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'\"": exit status 1
functional_test.go:244: expected to have label "minikube.k8s.io/commit" in node labels but got : 
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
functional_test.go:244: expected to have label "minikube.k8s.io/version" in node labels but got : 
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
functional_test.go:244: expected to have label "minikube.k8s.io/updated_at" in node labels but got : 
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
functional_test.go:244: expected to have label "minikube.k8s.io/name" in node labels but got : 
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
functional_test.go:244: expected to have label "minikube.k8s.io/primary" in node labels but got : 
-- stdout --
	'Error executing template: template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range. Printing more information for debugging the template:
		template was:
			'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'
		raw data was:
			{"apiVersion":"v1","items":[],"kind":"List","metadata":{"resourceVersion":""}}
		object given to template engine was:
			map[apiVersion:v1 items:[] kind:List metadata:map[resourceVersion:]]
	

                                                
                                                
-- /stdout --
** stderr ** 
	Unable to connect to the server: dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.
	error executing template "'{{range $k, $v := (index .items 0).metadata.labels}}{{$k}} {{end}}'": template: output:1:20: executing "output" at <index .items 0>: error calling index: reflect: slice index out of range

                                                
                                                
** /stderr **
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p functional-873100 -n functional-873100: exit status 2 (12.0013982s)

                                                
                                                
-- stdout --
	Running

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 2 (may be ok)
helpers_test.go:244: <<< TestFunctional/parallel/NodeLabels FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestFunctional/parallel/NodeLabels]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs -n 25: (4m34.5956694s)
helpers_test.go:252: TestFunctional/parallel/NodeLabels logs: 
-- stdout --
	
	==> Audit <==
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	|  Command  |                                                Args                                                 |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| service   | functional-873100 service list                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	| ssh       | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|           | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|           | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| service   | functional-873100 service list                                                                      | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | -o json                                                                                             |                   |                   |         |                     |                     |
	| service   | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | --namespace=default --https                                                                         |                   |                   |         |                     |                     |
	|           | --url hello-node                                                                                    |                   |                   |         |                     |                     |
	| cp        | functional-873100 cp functional-873100:/home/docker/cp-test.txt                                     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|           | C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalparallelCpCmd1674743440\001\cp-test.txt |                   |                   |         |                     |                     |
	| service   | functional-873100                                                                                   | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | service hello-node --url                                                                            |                   |                   |         |                     |                     |
	|           | --format={{.IP}}                                                                                    |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:30 UTC |
	|           | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|           | /home/docker/cp-test.txt                                                                            |                   |                   |         |                     |                     |
	| service   | functional-873100 service                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | hello-node --url                                                                                    |                   |                   |         |                     |                     |
	| cp        | functional-873100 cp                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC | 08 Apr 25 18:31 UTC |
	|           | testdata\cp-test.txt                                                                                |                   |                   |         |                     |                     |
	|           | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start     | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:30 UTC |                     |
	|           | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|           | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|           | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh -n                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | functional-873100 sudo cat                                                                          |                   |                   |         |                     |                     |
	|           | /tmp/does/not/exist/cp-test.txt                                                                     |                   |                   |         |                     |                     |
	| start     | -p functional-873100                                                                                | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --dry-run --memory                                                                                  |                   |                   |         |                     |                     |
	|           | 250MB --alsologtostderr                                                                             |                   |                   |         |                     |                     |
	|           | --driver=hyperv                                                                                     |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh echo                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | hello                                                                                               |                   |                   |         |                     |                     |
	| ssh       | functional-873100 ssh cat                                                                           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | /etc/hostname                                                                                       |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| tunnel    | functional-873100 tunnel                                                                            | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| addons    | functional-873100 addons list                                                                       | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| addons    | functional-873100 addons list                                                                       | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	|           | -o json                                                                                             |                   |                   |         |                     |                     |
	| dashboard | --url --port 36195                                                                                  | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | -p functional-873100                                                                                |                   |                   |         |                     |                     |
	|           | --alsologtostderr -v=1                                                                              |                   |                   |         |                     |                     |
	| license   |                                                                                                     | minikube          | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC | 08 Apr 25 18:31 UTC |
	| ssh       | functional-873100 ssh sudo                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:31 UTC |                     |
	|           | systemctl is-active crio                                                                            |                   |                   |         |                     |                     |
	| image     | functional-873100 image load --daemon                                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:32 UTC | 08 Apr 25 18:33 UTC |
	|           | kicbase/echo-server:functional-873100                                                               |                   |                   |         |                     |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	| image     | functional-873100 image ls                                                                          | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:33 UTC | 08 Apr 25 18:34 UTC |
	| image     | functional-873100 image load --daemon                                                               | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:34 UTC |                     |
	|           | kicbase/echo-server:functional-873100                                                               |                   |                   |         |                     |                     |
	|           | --alsologtostderr                                                                                   |                   |                   |         |                     |                     |
	|-----------|-----------------------------------------------------------------------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:31:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	
	==> Docker <==
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'cdf6750a126aec9b27f3bcde394f939c0f1d3b4e8fac18ac827276a70b84257f'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '4fe4cd24d23ee644646e8a4aca5062903dc0472afa6f68be46bb0d92b1aeeaee'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'c4aa5f078886c96a860b121bff27bab9eebf562c46cdf1bf510e50658798b924'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID 'fc223ee059ea04497a5196ea5100b11b478c898dea5d3388523b474a9cb56718'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peerFailed to get image list from docker"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '8a4b204b04c0c54cf6a18dbce48349546af57d47ef025706a7dca5901c36ca3d'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9878e1198761ece38ce3d8e2236b00afaaef8b514af17a43746145774b701a91'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3cfe19f1aff688d03feac4d50124088c5c4cabc745b0f9bc2844deb9260c14ee'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '3572214a0635128383f9211931b82dde49c32d2fc8d8573496954bfa76c67792'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '2d95f7cf40fd6e00237b30e26674bfe20d742739a6d675120b32f2e8a76b6074'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '6045f448d7d0f9565f68f4318ec1669de05d4366693699f09aba3801048cec2d'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '24209531dd610da9df152926277c1bbfbd649093fdb257bf5c8b07ebc637738e'"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="error getting RW layer size for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a': error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a/json?size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:37:06 functional-873100 cri-dockerd[4661]: time="2025-04-08T18:37:06Z" level=error msg="Set backoffDuration to : 1m0s for container ID '9c19fe0a551b3448a04c97a58be49ab1aaaa408f59d71bc83711e2510d6e6a4a'"
	
	
	==> container status <==
	command /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a" failed with error: /bin/bash -c "sudo `which crictl || echo crictl` ps -a || sudo docker ps -a": Process exited with status 1
	stdout:
	
	stderr:
	time="2025-04-08T18:37:08Z" level=fatal msg="validate service connection: validate CRI v1 runtime API for endpoint \"unix:///var/run/cri-dockerd.sock\": rpc error: code = DeadlineExceeded desc = context deadline exceeded"
	error during connect: Get "http://%2Fvar%2Frun%2Fdocker.sock/v1.47/containers/json?all=1": read unix @->/run/docker.sock: read: connection reset by peer
	
	
	==> describe nodes <==
	command /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig" failed with error: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl describe nodes --kubeconfig=/var/lib/minikube/kubeconfig": Process exited with status 1
	stdout:
	
	stderr:
	The connection to the server localhost:8441 was refused - did you specify the right host or port?
	
	
	==> dmesg <==
	[ +28.267561] kauditd_printk_skb: 10 callbacks suppressed
	[Apr 8 18:14] systemd-fstab-generator[3877]: Ignoring "noauto" option for root device
	[  +0.672076] systemd-fstab-generator[3911]: Ignoring "noauto" option for root device
	[  +0.280795] systemd-fstab-generator[3923]: Ignoring "noauto" option for root device
	[  +0.293834] systemd-fstab-generator[3937]: Ignoring "noauto" option for root device
	[  +5.440526] kauditd_printk_skb: 89 callbacks suppressed
	[  +7.933629] systemd-fstab-generator[4609]: Ignoring "noauto" option for root device
	[  +0.219703] systemd-fstab-generator[4622]: Ignoring "noauto" option for root device
	[  +0.216904] systemd-fstab-generator[4634]: Ignoring "noauto" option for root device
	[  +0.286524] systemd-fstab-generator[4649]: Ignoring "noauto" option for root device
	[  +0.958166] systemd-fstab-generator[4818]: Ignoring "noauto" option for root device
	[  +2.919233] kauditd_printk_skb: 193 callbacks suppressed
	[  +1.805984] systemd-fstab-generator[5860]: Ignoring "noauto" option for root device
	[  +5.996373] kauditd_printk_skb: 55 callbacks suppressed
	[ +13.483989] systemd-fstab-generator[6670]: Ignoring "noauto" option for root device
	[  +0.184853] kauditd_printk_skb: 35 callbacks suppressed
	[Apr 8 18:17] systemd-fstab-generator[7957]: Ignoring "noauto" option for root device
	[  +0.149585] kauditd_printk_skb: 12 callbacks suppressed
	[  +0.469346] systemd-fstab-generator[7993]: Ignoring "noauto" option for root device
	[  +0.260256] systemd-fstab-generator[8005]: Ignoring "noauto" option for root device
	[  +0.309575] systemd-fstab-generator[8019]: Ignoring "noauto" option for root device
	[  +5.425847] kauditd_printk_skb: 89 callbacks suppressed
	[Apr 8 18:36] systemd-fstab-generator[14031]: Ignoring "noauto" option for root device
	[Apr 8 18:37] systemd-fstab-generator[14218]: Ignoring "noauto" option for root device
	[  +0.130934] kauditd_printk_skb: 12 callbacks suppressed
	
	
	==> kernel <==
	 18:39:07 up 29 min,  0 users,  load average: 0.00, 0.02, 0.04
	Linux functional-873100 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kubelet <==
	Apr 08 18:38:55 functional-873100 kubelet[5867]: E0408 18:38:55.170890    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/kube-scheduler-functional-873100.18346ab28d526cec\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{kube-scheduler-functional-873100.18346ab28d526cec  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:kube-scheduler-functional-873100,UID:807b787af7a083ae3ea608e19aade7a5,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Unhealthy,Message:Readiness probe failed: Get \"https://127.0.0.1:10259/readyz\": dial tcp 127.0.0.1:10259: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:50.806936812 +0000 UTC m=+198.300576766,LastTimestamp:2025
-04-08 18:17:53.809217825 +0000 UTC m=+201.302857779,Count:5,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	Apr 08 18:38:55 functional-873100 kubelet[5867]: E0408 18:38:55.171091    5867 event.go:307] "Unable to write event (retry limit exceeded!)" event="&Event{ObjectMeta:{kube-scheduler-functional-873100.18346ab340459921  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:kube-scheduler-functional-873100,UID:807b787af7a083ae3ea608e19aade7a5,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Unhealthy,Message:Readiness probe failed: Get \"https://127.0.0.1:10259/readyz\": dial tcp 127.0.0.1:10259: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:53.809217825 +0000 UTC m=+201.302857779,LastTimestamp:2025-04-08 18:17:53.809217825 +0000 UTC m=+201.302857779,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,
}"
	Apr 08 18:38:55 functional-873100 kubelet[5867]: E0408 18:38:55.172001    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/etcd-functional-873100.18346ab29d6d446e\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{etcd-functional-873100.18346ab29d6d446e  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:etcd-functional-873100,UID:3e3dca81cfb79388693d0fc85f2bc1ca,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Unhealthy,Message:Readiness probe failed: Get \"http://127.0.0.1:2381/readyz\": dial tcp 127.0.0.1:2381: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:51.077131374 +0000 UTC m=+198.570771328,LastTimestamp:2025-04-08 18:17:54.079136187 +0000 UTC m=+201.
572776141,Count:5,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	Apr 08 18:38:58 functional-873100 kubelet[5867]: E0408 18:38:58.362079    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 21m8.842552487s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/var/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:38:59 functional-873100 kubelet[5867]: E0408 18:38:59.708035    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:39:02 functional-873100 kubelet[5867]: I0408 18:39:02.719914    5867 status_manager.go:890] "Failed to get status for pod" podUID="3e3dca81cfb79388693d0fc85f2bc1ca" pod="kube-system/etcd-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/etcd-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:39:02 functional-873100 kubelet[5867]: I0408 18:39:02.721060    5867 status_manager.go:890] "Failed to get status for pod" podUID="807b787af7a083ae3ea608e19aade7a5" pod="kube-system/kube-scheduler-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-scheduler-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:39:02 functional-873100 kubelet[5867]: I0408 18:39:02.722218    5867 status_manager.go:890] "Failed to get status for pod" podUID="d64fc418bb9f3cd367047ed11adace28" pod="kube-system/kube-apiserver-functional-873100" err="Get \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/pods/kube-apiserver-functional-873100\": dial tcp 172.22.46.213:8441: connect: connection refused"
	Apr 08 18:39:03 functional-873100 kubelet[5867]: E0408 18:39:03.363108    5867 kubelet.go:2412] "Skipping pod synchronization" err="[container runtime is down, PLEG is not healthy: pleg was last seen active 21m13.843594966s ago; threshold is 3m0s, container runtime not ready: RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/var/run/docker.sock: read: connection reset by peer]"
	Apr 08 18:39:03 functional-873100 kubelet[5867]: E0408 18:39:03.967708    5867 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://control-plane.minikube.internal:8441/api/v1/namespaces/kube-system/events/etcd-functional-873100.18346ab29d6d446e\": dial tcp 172.22.46.213:8441: connect: connection refused" event="&Event{ObjectMeta:{etcd-functional-873100.18346ab29d6d446e  kube-system    0 0001-01-01 00:00:00 +0000 UTC <nil> <nil> map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:kube-system,Name:etcd-functional-873100,UID:3e3dca81cfb79388693d0fc85f2bc1ca,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Unhealthy,Message:Readiness probe failed: Get \"http://127.0.0.1:2381/readyz\": dial tcp 127.0.0.1:2381: connect: connection refused,Source:EventSource{Component:kubelet,Host:functional-873100,},FirstTimestamp:2025-04-08 18:17:51.077131374 +0000 UTC m=+198.570771328,LastTimestamp:2025-04-08 18:17:54.079136187 +0000 UTC m=+201.
572776141,Count:5,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:functional-873100,}"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.711243    5867 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://control-plane.minikube.internal:8441/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/functional-873100?timeout=10s\": dial tcp 172.22.46.213:8441: connect: connection refused" interval="7s"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789547    5867 log.go:32] "ListPodSandbox with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789639    5867 kuberuntime_sandbox.go:305] "Failed to list pod sandboxes" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789716    5867 generic.go:256] "GenericPLEG: Unable to retrieve pods" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dpodsandbox%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789748    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789778    5867 container_log_manager.go:197] "Failed to rotate container logs" err="failed to list containers: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789853    5867 log.go:32] "ImageFsInfo from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789876    5867 eviction_manager.go:292] "Eviction manager: failed to get summary stats" err="failed to get imageFs stats: rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json?all=1&shared-size=1\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789915    5867 log.go:32] "ListImages with filter from image service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer" filter="nil"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.789935    5867 kuberuntime_image.go:117] "Failed to list images" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: I0408 18:39:06.789948    5867 image_gc_manager.go:222] "Failed to update image list" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/images/json\": read unix @->/var/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.914976    5867 kubelet.go:3018] "Container runtime not ready" runtimeReady="RuntimeReady=false reason:DockerDaemonNotReady message:docker: failed to get docker version: failed to get docker version from dockerd: error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/version\": read unix @->/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.915065    5867 log.go:32] "ListContainers with filter from runtime service failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/run/docker.sock: read: connection reset by peer" filter="&ContainerFilter{Id:,State:nil,PodSandboxId:,LabelSelector:map[string]string{},}"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.915102    5867 kuberuntime_container.go:508] "ListContainers failed" err="rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/run/docker.sock: read: connection reset by peer"
	Apr 08 18:39:06 functional-873100 kubelet[5867]: E0408 18:39:06.915568    5867 kubelet.go:1529] "Container garbage collection failed" err="[rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/var/run/docker.sock: read: connection reset by peer, rpc error: code = Unknown desc = error during connect: Get \"http://%2Fvar%2Frun%2Fdocker.sock/v1.43/containers/json?all=1&filters=%7B%22label%22%3A%7B%22io.kubernetes.docker.type%3Dcontainer%22%3Atrue%7D%7D\": read unix @->/run/docker.sock: read: connection reset by peer]"
	

                                                
                                                
-- /stdout --
** stderr ** 
	E0408 18:35:05.928324    6748 logs.go:279] Failed to list containers for "kube-apiserver": docker: docker ps -a --filter=name=k8s_kube-apiserver --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:35:05.985301    6748 logs.go:279] Failed to list containers for "etcd": docker: docker ps -a --filter=name=k8s_etcd --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:35:06.022639    6748 logs.go:279] Failed to list containers for "coredns": docker: docker ps -a --filter=name=k8s_coredns --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:35:06.069439    6748 logs.go:279] Failed to list containers for "kube-scheduler": docker: docker ps -a --filter=name=k8s_kube-scheduler --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:36:06.222265    6748 logs.go:279] Failed to list containers for "kube-proxy": docker: docker ps -a --filter=name=k8s_kube-proxy --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:36:06.265283    6748 logs.go:279] Failed to list containers for "kube-controller-manager": docker: docker ps -a --filter=name=k8s_kube-controller-manager --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:36:06.318217    6748 logs.go:279] Failed to list containers for "kindnet": docker: docker ps -a --filter=name=k8s_kindnet --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?
	E0408 18:37:06.452151    6748 logs.go:279] Failed to list containers for "storage-provisioner": docker: docker ps -a --filter=name=k8s_storage-provisioner --format={{.ID}}: Process exited with status 1
	stdout:
	
	stderr:
	Cannot connect to the Docker daemon at unix:///var/run/docker.sock. Is the docker daemon running?

                                                
                                                
** /stderr **
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100
helpers_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p functional-873100 -n functional-873100: exit status 2 (12.6015703s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
helpers_test.go:254: status error: exit status 2 (may be ok)
helpers_test.go:256: "functional-873100" apiserver is not running, skipping kubectl commands (state="Stopped")
--- FAIL: TestFunctional/parallel/NodeLabels (301.41s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/DeployApp (2.17s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/DeployApp
functional_test.go:1456: (dbg) Run:  kubectl --context functional-873100 create deployment hello-node --image=registry.k8s.io/echoserver:1.8
functional_test.go:1456: (dbg) Non-zero exit: kubectl --context functional-873100 create deployment hello-node --image=registry.k8s.io/echoserver:1.8: exit status 1 (2.1599095s)

                                                
                                                
** stderr ** 
	error: failed to create deployment: Post "https://172.22.46.213:8441/apis/apps/v1/namespaces/default/deployments?fieldManager=kubectl-create&fieldValidation=Strict": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.

                                                
                                                
** /stderr **
functional_test.go:1460: failed to create hello-node deployment with this command "kubectl --context functional-873100 create deployment hello-node --image=registry.k8s.io/echoserver:1.8": exit status 1.
--- FAIL: TestFunctional/parallel/ServiceCmd/DeployApp (2.17s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/List (8.93s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/List
functional_test.go:1476: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 service list
functional_test.go:1476: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 service list: exit status 103 (8.9328092s)

                                                
                                                
-- stdout --
	* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
	  To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
-- /stdout --
functional_test.go:1478: failed to do service list. args "out/minikube-windows-amd64.exe -p functional-873100 service list" : exit status 103
functional_test.go:1481: expected 'service list' to contain *hello-node* but got -"* The control-plane node functional-873100 apiserver is not running: (state=Stopped)\n  To start a cluster, run: \"minikube start -p functional-873100\"\n"-
--- FAIL: TestFunctional/parallel/ServiceCmd/List (8.93s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/JSONOutput (8.61s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/JSONOutput
functional_test.go:1506: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 service list -o json
functional_test.go:1506: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 service list -o json: exit status 103 (8.6120502s)

                                                
                                                
-- stdout --
	* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
	  To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
-- /stdout --
functional_test.go:1508: failed to list services with json format. args "out/minikube-windows-amd64.exe -p functional-873100 service list -o json": exit status 103
--- FAIL: TestFunctional/parallel/ServiceCmd/JSONOutput (8.61s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/HTTPS (8.62s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/HTTPS
functional_test.go:1526: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 service --namespace=default --https --url hello-node
functional_test.go:1526: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 service --namespace=default --https --url hello-node: exit status 103 (8.6193144s)

                                                
                                                
-- stdout --
	* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
	  To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
-- /stdout --
functional_test.go:1528: failed to get service url. args "out/minikube-windows-amd64.exe -p functional-873100 service --namespace=default --https --url hello-node" : exit status 103
--- FAIL: TestFunctional/parallel/ServiceCmd/HTTPS (8.62s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/Format (8.7s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/Format
functional_test.go:1557: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url --format={{.IP}}
functional_test.go:1557: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url --format={{.IP}}: exit status 103 (8.6984313s)

                                                
                                                
-- stdout --
	* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
	  To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
-- /stdout --
functional_test.go:1559: failed to get service url with custom format. args "out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url --format={{.IP}}": exit status 103
functional_test.go:1565: "* The control-plane node functional-873100 apiserver is not running: (state=Stopped)\n  To start a cluster, run: \"minikube start -p functional-873100\"" is not a valid IP
--- FAIL: TestFunctional/parallel/ServiceCmd/Format (8.70s)

                                                
                                    
x
+
TestFunctional/parallel/ServiceCmd/URL (8.61s)

                                                
                                                
=== RUN   TestFunctional/parallel/ServiceCmd/URL
functional_test.go:1576: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url
functional_test.go:1576: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url: exit status 103 (8.612965s)

                                                
                                                
-- stdout --
	* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
	  To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
-- /stdout --
functional_test.go:1578: failed to get service url. args: "out/minikube-windows-amd64.exe -p functional-873100 service hello-node --url": exit status 103
functional_test.go:1582: found endpoint for hello-node: * The control-plane node functional-873100 apiserver is not running: (state=Stopped)
To start a cluster, run: "minikube start -p functional-873100"
functional_test.go:1586: failed to parse "* The control-plane node functional-873100 apiserver is not running: (state=Stopped)\n  To start a cluster, run: \"minikube start -p functional-873100\"": parse "* The control-plane node functional-873100 apiserver is not running: (state=Stopped)\n  To start a cluster, run: \"minikube start -p functional-873100\"": net/url: invalid control character in URL
--- FAIL: TestFunctional/parallel/ServiceCmd/URL (8.61s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (7.71s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr]
functional_test_tunnel_test.go:154: (dbg) daemon: [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr]
functional_test_tunnel_test.go:190: tunnel command failed with unexpected error: exit code 103. stderr: I0408 18:31:30.898543    9740 out.go:345] Setting OutFile to fd 1320 ...
I0408 18:31:31.011495    9740 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:31:31.011568    9740 out.go:358] Setting ErrFile to fd 1296...
I0408 18:31:31.011568    9740 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:31:31.027121    9740 mustload.go:65] Loading cluster: functional-873100
I0408 18:31:31.027472    9740 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:31:31.028589    9740 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:31:33.336425    9740 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:31:33.336548    9740 main.go:141] libmachine: [stderr =====>] : 
I0408 18:31:33.336548    9740 host.go:66] Checking if "functional-873100" exists ...
I0408 18:31:33.337508    9740 api_server.go:166] Checking apiserver status ...
I0408 18:31:33.350515    9740 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
I0408 18:31:33.350515    9740 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:31:35.627309    9740 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:31:35.627309    9740 main.go:141] libmachine: [stderr =====>] : 
I0408 18:31:35.627454    9740 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:31:38.303290    9740 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:31:38.303398    9740 main.go:141] libmachine: [stderr =====>] : 
I0408 18:31:38.303610    9740 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:31:38.427853    9740 ssh_runner.go:235] Completed: sudo pgrep -xnf kube-apiserver.*minikube.*: (5.0772985s)
W0408 18:31:38.427853    9740 api_server.go:170] stopped: unable to get apiserver pid: sudo pgrep -xnf kube-apiserver.*minikube.*: Process exited with status 1
stdout:

                                                
                                                
stderr:
I0408 18:31:38.432570    9740 out.go:177] * The control-plane node functional-873100 apiserver is not running: (state=Stopped)
I0408 18:31:38.435473    9740 out.go:177]   To start a cluster, run: "minikube start -p functional-873100"

                                                
                                                
stdout: * The control-plane node functional-873100 apiserver is not running: (state=Stopped)
To start a cluster, run: "minikube start -p functional-873100"
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] ...
helpers_test.go:490: unable to find parent, assuming dead: process does not exist
functional_test_tunnel_test.go:194: read stdout failed: read |0: file already closed
functional_test_tunnel_test.go:194: (dbg) [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] stdout:
functional_test_tunnel_test.go:194: read stderr failed: read |0: file already closed
functional_test_tunnel_test.go:194: (dbg) [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] stderr:
functional_test_tunnel_test.go:194: (dbg) stopping [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] ...
helpers_test.go:502: unable to terminate pid 7176: Access is denied.
functional_test_tunnel_test.go:194: (dbg) [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] stdout:
* The control-plane node functional-873100 apiserver is not running: (state=Stopped)
To start a cluster, run: "minikube start -p functional-873100"
functional_test_tunnel_test.go:194: (dbg) [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] stderr:
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/RunSecondTunnel (7.71s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (4.23s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup
functional_test_tunnel_test.go:212: (dbg) Run:  kubectl --context functional-873100 apply -f testdata\testsvc.yaml
functional_test_tunnel_test.go:212: (dbg) Non-zero exit: kubectl --context functional-873100 apply -f testdata\testsvc.yaml: exit status 1 (4.2183918s)

                                                
                                                
** stderr ** 
	error: error validating "testdata\\testsvc.yaml": error validating data: failed to download openapi: Get "https://172.22.46.213:8441/openapi/v2?timeout=32s": dial tcp 172.22.46.213:8441: connectex: No connection could be made because the target machine actively refused it.; if you choose to ignore these errors, turn validation off with --validate=false

                                                
                                                
** /stderr **
functional_test_tunnel_test.go:214: kubectl --context functional-873100 apply -f testdata\testsvc.yaml failed: exit status 1
--- FAIL: TestFunctional/parallel/TunnelCmd/serial/WaitService/Setup (4.23s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListShort (46.56s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListShort
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListShort

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListShort
functional_test.go:278: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls --format short --alsologtostderr
functional_test.go:278: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls --format short --alsologtostderr: (46.5619054s)
functional_test.go:283: (dbg) Stdout: out/minikube-windows-amd64.exe -p functional-873100 image ls --format short --alsologtostderr:

                                                
                                                
functional_test.go:286: (dbg) Stderr: out/minikube-windows-amd64.exe -p functional-873100 image ls --format short --alsologtostderr:
I0408 18:42:21.755288    4356 out.go:345] Setting OutFile to fd 1392 ...
I0408 18:42:21.834109    4356 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:42:21.834109    4356 out.go:358] Setting ErrFile to fd 1388...
I0408 18:42:21.834109    4356 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:42:21.851087    4356 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:42:21.851087    4356 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:42:21.852086    4356 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:42:24.136704    4356 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:42:24.137312    4356 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:24.149736    4356 ssh_runner.go:195] Run: systemctl --version
I0408 18:42:24.149736    4356 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:42:26.446387    4356 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:42:26.446387    4356 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:26.446471    4356 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:42:29.112929    4356 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:42:29.112929    4356 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:29.112929    4356 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:42:29.212512    4356 ssh_runner.go:235] Completed: systemctl --version: (5.0627361s)
I0408 18:42:29.223585    4356 ssh_runner.go:195] Run: docker images --no-trunc --format "{{json .}}"
I0408 18:43:08.124529    4356 ssh_runner.go:235] Completed: docker images --no-trunc --format "{{json .}}": (38.9006394s)
W0408 18:43:08.124529    4356 cache_images.go:734] Failed to list images for profile functional-873100 docker images: docker images --no-trunc --format "{{json .}}": Process exited with status 1
stdout:

                                                
                                                
stderr:
error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
functional_test.go:292: expected registry.k8s.io/pause to be listed with minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageListShort (46.56s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListTable (46.86s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListTable
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListTable

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListTable
functional_test.go:278: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls --format table --alsologtostderr
functional_test.go:278: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls --format table --alsologtostderr: (46.8637126s)
functional_test.go:283: (dbg) Stdout: out/minikube-windows-amd64.exe -p functional-873100 image ls --format table --alsologtostderr:
|-------|-----|----------|------|
| Image | Tag | Image ID | Size |
|-------|-----|----------|------|
|-------|-----|----------|------|
functional_test.go:286: (dbg) Stderr: out/minikube-windows-amd64.exe -p functional-873100 image ls --format table --alsologtostderr:
I0408 18:43:21.602806    8200 out.go:345] Setting OutFile to fd 1464 ...
I0408 18:43:21.707232    8200 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:21.707232    8200 out.go:358] Setting ErrFile to fd 1368...
I0408 18:43:21.707339    8200 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:21.723046    8200 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:21.723675    8200 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:21.724057    8200 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:23.945105    8200 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:23.945169    8200 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:23.956982    8200 ssh_runner.go:195] Run: systemctl --version
I0408 18:43:23.956982    8200 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:26.229243    8200 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:26.229243    8200 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:26.229243    8200 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:43:28.752031    8200 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:43:28.752031    8200 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:28.753106    8200 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:43:28.855783    8200 ssh_runner.go:235] Completed: systemctl --version: (4.8987627s)
I0408 18:43:28.865574    8200 ssh_runner.go:195] Run: docker images --no-trunc --format "{{json .}}"
I0408 18:44:08.263866    8200 ssh_runner.go:235] Completed: docker images --no-trunc --format "{{json .}}": (39.3979848s)
W0408 18:44:08.263866    8200 cache_images.go:734] Failed to list images for profile functional-873100 docker images: docker images --no-trunc --format "{{json .}}": Process exited with status 1
stdout:

                                                
                                                
stderr:
error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
functional_test.go:292: expected | registry.k8s.io/pause to be listed with minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageListTable (46.86s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListJson (60.12s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListJson
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListJson

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListJson
functional_test.go:278: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls --format json --alsologtostderr
functional_test.go:278: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls --format json --alsologtostderr: (1m0.1164838s)
functional_test.go:283: (dbg) Stdout: out/minikube-windows-amd64.exe -p functional-873100 image ls --format json --alsologtostderr:
[]
functional_test.go:286: (dbg) Stderr: out/minikube-windows-amd64.exe -p functional-873100 image ls --format json --alsologtostderr:
I0408 18:43:08.340128    8724 out.go:345] Setting OutFile to fd 1336 ...
I0408 18:43:08.429692    8724 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:08.429692    8724 out.go:358] Setting ErrFile to fd 1380...
I0408 18:43:08.429692    8724 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:08.453946    8724 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:08.454962    8724 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:08.455956    8724 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:10.881014    8724 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:10.881071    8724 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:10.893818    8724 ssh_runner.go:195] Run: systemctl --version
I0408 18:43:10.893818    8724 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:13.243640    8724 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:13.243701    8724 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:13.243701    8724 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:43:16.042626    8724 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:43:16.043495    8724 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:16.043495    8724 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:43:16.159676    8724 ssh_runner.go:235] Completed: systemctl --version: (5.2658178s)
I0408 18:43:16.169856    8724 ssh_runner.go:195] Run: docker images --no-trunc --format "{{json .}}"
I0408 18:44:08.258495    8724 ssh_runner.go:235] Completed: docker images --no-trunc --format "{{json .}}": (52.0882327s)
W0408 18:44:08.258495    8724 cache_images.go:734] Failed to list images for profile functional-873100 docker images: docker images --no-trunc --format "{{json .}}": Process exited with status 1
stdout:

                                                
                                                
stderr:
error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
functional_test.go:292: expected ["registry.k8s.io/pause to be listed with minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageListJson (60.12s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageListYaml (44.48s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageListYaml
=== PAUSE TestFunctional/parallel/ImageCommands/ImageListYaml

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageListYaml
functional_test.go:278: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls --format yaml --alsologtostderr
functional_test.go:278: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls --format yaml --alsologtostderr: (44.481537s)
functional_test.go:283: (dbg) Stdout: out/minikube-windows-amd64.exe -p functional-873100 image ls --format yaml --alsologtostderr:
[]

                                                
                                                
functional_test.go:286: (dbg) Stderr: out/minikube-windows-amd64.exe -p functional-873100 image ls --format yaml --alsologtostderr:
I0408 18:42:23.795199    9944 out.go:345] Setting OutFile to fd 1408 ...
I0408 18:42:23.894822    9944 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:42:23.894822    9944 out.go:358] Setting ErrFile to fd 1300...
I0408 18:42:23.894822    9944 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:42:23.911553    9944 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:42:23.912236    9944 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:42:23.913018    9944 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:42:26.189258    9944 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:42:26.189258    9944 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:26.202561    9944 ssh_runner.go:195] Run: systemctl --version
I0408 18:42:26.202561    9944 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:42:28.504435    9944 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:42:28.504435    9944 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:28.504669    9944 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:42:31.091724    9944 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:42:31.091796    9944 main.go:141] libmachine: [stderr =====>] : 
I0408 18:42:31.092301    9944 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:42:31.192522    9944 ssh_runner.go:235] Completed: systemctl --version: (4.9897868s)
I0408 18:42:31.202745    9944 ssh_runner.go:195] Run: docker images --no-trunc --format "{{json .}}"
I0408 18:43:08.116053    9944 ssh_runner.go:235] Completed: docker images --no-trunc --format "{{json .}}": (36.9130187s)
W0408 18:43:08.116053    9944 cache_images.go:734] Failed to list images for profile functional-873100 docker images: docker images --no-trunc --format "{{json .}}": Process exited with status 1
stdout:

                                                
                                                
stderr:
error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
functional_test.go:292: expected - registry.k8s.io/pause to be listed with minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageListYaml (44.48s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageBuild (120.39s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageBuild
=== PAUSE TestFunctional/parallel/ImageCommands/ImageBuild

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ImageCommands/ImageBuild
functional_test.go:325: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh pgrep buildkitd
functional_test.go:325: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 ssh pgrep buildkitd: exit status 1 (10.2753843s)

                                                
                                                
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test.go:332: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image build -t localhost/my-image:functional-873100 testdata\build --alsologtostderr
functional_test.go:332: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image build -t localhost/my-image:functional-873100 testdata\build --alsologtostderr: (49.8887s)
functional_test.go:340: (dbg) Stderr: out/minikube-windows-amd64.exe -p functional-873100 image build -t localhost/my-image:functional-873100 testdata\build --alsologtostderr:
I0408 18:43:18.560139    9520 out.go:345] Setting OutFile to fd 1336 ...
I0408 18:43:18.660278    9520 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:18.660352    9520 out.go:358] Setting ErrFile to fd 1380...
I0408 18:43:18.660352    9520 out.go:392] TERM=,COLORTERM=, which probably does not support color
I0408 18:43:18.675571    9520 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:18.697897    9520 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
I0408 18:43:18.698818    9520 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:20.975121    9520 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:20.975121    9520 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:20.987445    9520 ssh_runner.go:195] Run: systemctl --version
I0408 18:43:20.988145    9520 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM functional-873100 ).state
I0408 18:43:23.239304    9520 main.go:141] libmachine: [stdout =====>] : Running

                                                
                                                
I0408 18:43:23.239304    9520 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:23.239370    9520 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM functional-873100 ).networkadapters[0]).ipaddresses[0]
I0408 18:43:25.902123    9520 main.go:141] libmachine: [stdout =====>] : 172.22.46.213

                                                
                                                
I0408 18:43:25.902281    9520 main.go:141] libmachine: [stderr =====>] : 
I0408 18:43:25.902565    9520 sshutil.go:53] new ssh client: &{IP:172.22.46.213 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\functional-873100\id_rsa Username:docker}
I0408 18:43:26.006179    9520 ssh_runner.go:235] Completed: systemctl --version: (5.018695s)
I0408 18:43:26.006736    9520 build_images.go:161] Building image from path: C:\Users\jenkins.minikube3\AppData\Local\Temp\build.497934773.tar
I0408 18:43:26.023579    9520 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build
I0408 18:43:26.060510    9520 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/build/build.497934773.tar
I0408 18:43:26.069092    9520 ssh_runner.go:352] existence check for /var/lib/minikube/build/build.497934773.tar: stat -c "%s %y" /var/lib/minikube/build/build.497934773.tar: Process exited with status 1
stdout:

                                                
                                                
stderr:
stat: cannot statx '/var/lib/minikube/build/build.497934773.tar': No such file or directory
I0408 18:43:26.069092    9520 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\AppData\Local\Temp\build.497934773.tar --> /var/lib/minikube/build/build.497934773.tar (3072 bytes)
I0408 18:43:26.131950    9520 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/build/build.497934773
I0408 18:43:26.161650    9520 ssh_runner.go:195] Run: sudo tar -C /var/lib/minikube/build/build.497934773 -xf /var/lib/minikube/build/build.497934773.tar
I0408 18:43:26.179680    9520 docker.go:360] Building image: /var/lib/minikube/build/build.497934773
I0408 18:43:26.192301    9520 ssh_runner.go:195] Run: docker build -t localhost/my-image:functional-873100 /var/lib/minikube/build/build.497934773
ERROR: error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
I0408 18:44:08.262114    9520 ssh_runner.go:235] Completed: docker build -t localhost/my-image:functional-873100 /var/lib/minikube/build/build.497934773: (42.0689596s)
W0408 18:44:08.262354    9520 build_images.go:125] Failed to build image for profile functional-873100. make sure the profile is running. Docker build /var/lib/minikube/build/build.497934773.tar: buildimage docker: docker build -t localhost/my-image:functional-873100 /var/lib/minikube/build/build.497934773: Process exited with status 1
stdout:

                                                
                                                
stderr:
ERROR: error during connect: Head "http://%2Fvar%2Frun%2Fdocker.sock/_ping": read unix @->/var/run/docker.sock: read: connection reset by peer
I0408 18:44:08.262439    9520 build_images.go:133] succeeded building to: 
I0408 18:44:08.262486    9520 build_images.go:134] failed building to: functional-873100
W0408 18:44:08.294084    9520 root.go:91] failed to log command end to audit: failed to find a log row with id equals to d0166cb8-d626-43fc-aaf9-6d4e6a8dc9fd
functional_test.go:468: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls
functional_test.go:468: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls: (1m0.2216428s)
functional_test.go:463: expected "localhost/my-image:functional-873100" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageBuild (120.39s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageLoadDaemon (121.68s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadDaemon
functional_test.go:372: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr
functional_test.go:372: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr: (1m1.3978106s)
functional_test.go:468: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls
functional_test.go:468: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls: (1m0.2807153s)
functional_test.go:463: expected "kicbase/echo-server:functional-873100" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageLoadDaemon (121.68s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageReloadDaemon (120.5s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageReloadDaemon
functional_test.go:382: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr
functional_test.go:382: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr: (1m0.1958993s)
functional_test.go:468: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls
functional_test.go:468: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls: (1m0.3056959s)
functional_test.go:463: expected "kicbase/echo-server:functional-873100" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageReloadDaemon (120.50s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (120.44s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon
functional_test.go:252: (dbg) Run:  docker pull kicbase/echo-server:latest
functional_test.go:257: (dbg) Run:  docker tag kicbase/echo-server:latest kicbase/echo-server:functional-873100
functional_test.go:262: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr
functional_test.go:262: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image load --daemon kicbase/echo-server:functional-873100 --alsologtostderr: (59.3276818s)
functional_test.go:468: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls
functional_test.go:468: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls: (1m0.2263561s)
functional_test.go:463: expected "kicbase/echo-server:functional-873100" to be loaded into minikube but the image is not there
--- FAIL: TestFunctional/parallel/ImageCommands/ImageTagAndLoadDaemon (120.44s)

                                                
                                    
x
+
TestFunctional/parallel/DockerEnv/powershell (471.35s)

                                                
                                                
=== RUN   TestFunctional/parallel/DockerEnv/powershell
functional_test.go:516: (dbg) Run:  powershell.exe -NoProfile -NonInteractive "out/minikube-windows-amd64.exe -p functional-873100 docker-env | Invoke-Expression ; out/minikube-windows-amd64.exe status -p functional-873100"
E0408 18:36:52.227289    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
functional_test.go:516: (dbg) Non-zero exit: powershell.exe -NoProfile -NonInteractive "out/minikube-windows-amd64.exe -p functional-873100 docker-env | Invoke-Expression ; out/minikube-windows-amd64.exe status -p functional-873100": exit status 1 (7m51.3426067s)

                                                
                                                
** stderr ** 
	X Exiting due to MK_DOCKER_SCRIPT: Error generating set output: write /dev/stdout: The pipe is being closed.
	* 
	╭────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                            │
	│    * If the above advice does not help, please let us know:                                                                │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                              │
	│                                                                                                                            │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.                                   │
	│    * Please also attach the following file to the GitHub issue:                                                            │
	│    * - C:\Users\jenkins.minikube3\AppData\Local\Temp\minikube_docker-env_1e51fd752a804983ed180295403359f1417a1165_2.log    │
	│                                                                                                                            │
	╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	E0408 18:44:08.369831    8108 out.go:221] Fprintf failed: write /dev/stdout: The pipe is being closed.

                                                
                                                
** /stderr **
functional_test.go:519: failed to run the command by deadline. exceeded timeout. powershell.exe -NoProfile -NonInteractive "out/minikube-windows-amd64.exe -p functional-873100 docker-env | Invoke-Expression ; out/minikube-windows-amd64.exe status -p functional-873100"
functional_test.go:522: failed to do status after eval-ing docker-env. error: exit status 1
--- FAIL: TestFunctional/parallel/DockerEnv/powershell (471.35s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageSaveToFile (60.22s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveToFile
functional_test.go:397: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image save kicbase/echo-server:functional-873100 C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar --alsologtostderr
functional_test.go:397: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image save kicbase/echo-server:functional-873100 C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar --alsologtostderr: (1m0.2168811s)
functional_test.go:403: expected "C:\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar" to exist after `image save`, but doesn't exist
--- FAIL: TestFunctional/parallel/ImageCommands/ImageSaveToFile (60.22s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.46s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageLoadFromFile
functional_test.go:426: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image load C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar --alsologtostderr
functional_test.go:426: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 image load C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar --alsologtostderr: exit status 80 (451.611ms)

                                                
                                                
-- stdout --
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 18:41:07.489711    6140 out.go:345] Setting OutFile to fd 1328 ...
	I0408 18:41:07.609416    6140 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:41:07.609416    6140 out.go:358] Setting ErrFile to fd 712...
	I0408 18:41:07.609416    6140 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:41:07.624809    6140 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:41:07.625819    6140 localpath.go:146] windows sanitize: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\images\amd64\C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\images\amd64\C_\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	I0408 18:41:07.755402    6140 cache.go:107] acquiring lock: {Name:mk57ae7409d68a0a0186e75c02ac3514b7d765cc Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:41:07.757694    6140 cache.go:96] cache image "C:\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar" -> "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar" took 131.8741ms
	I0408 18:41:07.761670    6140 out.go:201] 
	W0408 18:41:07.764684    6140 out.go:270] X Exiting due to GUEST_IMAGE_LOAD: Failed to load image: save to dir: caching images: caching image "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar": parsing image ref name for C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar: could not parse reference: C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	X Exiting due to GUEST_IMAGE_LOAD: Failed to load image: save to dir: caching images: caching image "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar": parsing image ref name for C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar: could not parse reference: C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	W0408 18:41:07.764684    6140 out.go:270] * 
	* 
	W0408 18:41:07.801309    6140 out.go:293] ╭───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                       │
	│    * If the above advice does not help, please let us know:                                                           │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                         │
	│                                                                                                                       │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.                              │
	│    * Please also attach the following file to the GitHub issue:                                                       │
	│    * - C:\Users\jenkins.minikube3\AppData\Local\Temp\minikube_image_d1ea4e0bb3a97cf650de5e2f5458414f1862ece0_0.log    │
	│                                                                                                                       │
	╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	╭───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                       │
	│    * If the above advice does not help, please let us know:                                                           │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                         │
	│                                                                                                                       │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.                              │
	│    * Please also attach the following file to the GitHub issue:                                                       │
	│    * - C:\Users\jenkins.minikube3\AppData\Local\Temp\minikube_image_d1ea4e0bb3a97cf650de5e2f5458414f1862ece0_0.log    │
	│                                                                                                                       │
	╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 18:41:07.803833    6140 out.go:201] 

                                                
                                                
** /stderr **
functional_test.go:428: loading image into minikube from file: exit status 80

                                                
                                                
-- stdout --
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 18:41:07.489711    6140 out.go:345] Setting OutFile to fd 1328 ...
	I0408 18:41:07.609416    6140 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:41:07.609416    6140 out.go:358] Setting ErrFile to fd 712...
	I0408 18:41:07.609416    6140 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:41:07.624809    6140 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:41:07.625819    6140 localpath.go:146] windows sanitize: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\images\amd64\C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\images\amd64\C_\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	I0408 18:41:07.755402    6140 cache.go:107] acquiring lock: {Name:mk57ae7409d68a0a0186e75c02ac3514b7d765cc Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:41:07.757694    6140 cache.go:96] cache image "C:\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar" -> "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar" took 131.8741ms
	I0408 18:41:07.761670    6140 out.go:201] 
	W0408 18:41:07.764684    6140 out.go:270] X Exiting due to GUEST_IMAGE_LOAD: Failed to load image: save to dir: caching images: caching image "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar": parsing image ref name for C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar: could not parse reference: C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	X Exiting due to GUEST_IMAGE_LOAD: Failed to load image: save to dir: caching images: caching image "C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\cache\\images\\amd64\\C_\\jenkins\\workspace\\Hyper-V_Windows_integration\\echo-server-save.tar": parsing image ref name for C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar: could not parse reference: C:\jenkins\workspace\Hyper-V_Windows_integration\echo-server-save.tar
	W0408 18:41:07.764684    6140 out.go:270] * 
	* 
	W0408 18:41:07.801309    6140 out.go:293] ╭───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                       │
	│    * If the above advice does not help, please let us know:                                                           │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                         │
	│                                                                                                                       │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.                              │
	│    * Please also attach the following file to the GitHub issue:                                                       │
	│    * - C:\Users\jenkins.minikube3\AppData\Local\Temp\minikube_image_d1ea4e0bb3a97cf650de5e2f5458414f1862ece0_0.log    │
	│                                                                                                                       │
	╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	╭───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                                                       │
	│    * If the above advice does not help, please let us know:                                                           │
	│      https://github.com/kubernetes/minikube/issues/new/choose                                                         │
	│                                                                                                                       │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.                              │
	│    * Please also attach the following file to the GitHub issue:                                                       │
	│    * - C:\Users\jenkins.minikube3\AppData\Local\Temp\minikube_image_d1ea4e0bb3a97cf650de5e2f5458414f1862ece0_0.log    │
	│                                                                                                                       │
	╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 18:41:07.803833    6140 out.go:201] 

                                                
                                                
** /stderr **
--- FAIL: TestFunctional/parallel/ImageCommands/ImageLoadFromFile (0.46s)

                                                
                                    
x
+
TestMultiControlPlane/serial/PingHostFromPods (70.15s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/PingHostFromPods
ha_test.go:199: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:207: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- sh -c "ping -c 1 172.22.32.1"
ha_test.go:218: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- sh -c "ping -c 1 172.22.32.1": exit status 1 (10.5359608s)

                                                
                                                
-- stdout --
	PING 172.22.32.1 (172.22.32.1): 56 data bytes
	
	--- 172.22.32.1 ping statistics ---
	1 packets transmitted, 0 packets received, 100% packet loss

                                                
                                                
-- /stdout --
** stderr ** 
	command terminated with exit code 1

                                                
                                                
** /stderr **
ha_test.go:219: Failed to ping host (172.22.32.1) from pod (busybox-58667487b6-d76nt): exit status 1
ha_test.go:207: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- sh -c "ping -c 1 172.22.32.1"
ha_test.go:218: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- sh -c "ping -c 1 172.22.32.1": exit status 1 (10.5170462s)

                                                
                                                
-- stdout --
	PING 172.22.32.1 (172.22.32.1): 56 data bytes
	
	--- 172.22.32.1 ping statistics ---
	1 packets transmitted, 0 packets received, 100% packet loss

                                                
                                                
-- /stdout --
** stderr ** 
	command terminated with exit code 1

                                                
                                                
** /stderr **
ha_test.go:219: Failed to ping host (172.22.32.1) from pod (busybox-58667487b6-lwn24): exit status 1
ha_test.go:207: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
ha_test.go:218: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- sh -c "ping -c 1 172.22.32.1"
ha_test.go:218: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- sh -c "ping -c 1 172.22.32.1": exit status 1 (10.5327805s)

                                                
                                                
-- stdout --
	PING 172.22.32.1 (172.22.32.1): 56 data bytes
	
	--- 172.22.32.1 ping statistics ---
	1 packets transmitted, 0 packets received, 100% packet loss

                                                
                                                
-- /stdout --
** stderr ** 
	command terminated with exit code 1

                                                
                                                
** /stderr **
ha_test.go:219: Failed to ping host (172.22.32.1) from pod (busybox-58667487b6-snc97): exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p ha-089400 -n ha-089400
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p ha-089400 -n ha-089400: (12.7718601s)
helpers_test.go:244: <<< TestMultiControlPlane/serial/PingHostFromPods FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/PingHostFromPods]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 logs -n 25: (9.1118089s)
helpers_test.go:252: TestMultiControlPlane/serial/PingHostFromPods logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| Command |                 Args                 |      Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	| image   | functional-873100 image build -t     | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:43 UTC |                     |
	|         | localhost/my-image:functional-873100 |                   |                   |         |                     |                     |
	|         | testdata\build --alsologtostderr     |                   |                   |         |                     |                     |
	| image   | functional-873100                    | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:43 UTC | 08 Apr 25 18:44 UTC |
	|         | image ls --format table              |                   |                   |         |                     |                     |
	|         | --alsologtostderr                    |                   |                   |         |                     |                     |
	| image   | functional-873100 image ls           | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:44 UTC | 08 Apr 25 18:45 UTC |
	| delete  | -p functional-873100                 | functional-873100 | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:46 UTC | 08 Apr 25 18:47 UTC |
	| start   | -p ha-089400 --wait=true             | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:47 UTC | 08 Apr 25 18:58 UTC |
	|         | --memory=2200 --ha                   |                   |                   |         |                     |                     |
	|         | -v=7 --alsologtostderr               |                   |                   |         |                     |                     |
	|         | --driver=hyperv                      |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- apply -f             | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | ./testdata/ha/ha-pod-dns-test.yaml   |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- rollout status       | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | deployment/busybox                   |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- get pods -o          | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- get pods -o          | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-d76nt --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-lwn24 --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-snc97 --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.io               |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-d76nt --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-lwn24 --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-snc97 --          |                   |                   |         |                     |                     |
	|         | nslookup kubernetes.default          |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-d76nt -- nslookup |                   |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-lwn24 -- nslookup |                   |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-snc97 -- nslookup |                   |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- get pods -o          | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC | 08 Apr 25 18:59 UTC |
	|         | busybox-58667487b6-d76nt             |                   |                   |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |                   |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 18:59 UTC |                     |
	|         | busybox-58667487b6-d76nt -- sh       |                   |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1             |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:00 UTC | 08 Apr 25 19:00 UTC |
	|         | busybox-58667487b6-lwn24             |                   |                   |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |                   |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:00 UTC |                     |
	|         | busybox-58667487b6-lwn24 -- sh       |                   |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1             |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:00 UTC | 08 Apr 25 19:00 UTC |
	|         | busybox-58667487b6-snc97             |                   |                   |         |                     |                     |
	|         | -- sh -c nslookup                    |                   |                   |         |                     |                     |
	|         | host.minikube.internal | awk         |                   |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                   |                   |         |                     |                     |
	| kubectl | -p ha-089400 -- exec                 | ha-089400         | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:00 UTC |                     |
	|         | busybox-58667487b6-snc97 -- sh       |                   |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1             |                   |                   |         |                     |                     |
	|---------|--------------------------------------|-------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:47:27
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:47:27.389938    9012 out.go:345] Setting OutFile to fd 1668 ...
	I0408 18:47:27.470893    9012 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:47:27.470961    9012 out.go:358] Setting ErrFile to fd 1060...
	I0408 18:47:27.470961    9012 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:47:27.491144    9012 out.go:352] Setting JSON to false
	I0408 18:47:27.495130    9012 start.go:129] hostinfo: {"hostname":"minikube3","uptime":98833,"bootTime":1744039214,"procs":178,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:47:27.495310    9012 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:47:27.501299    9012 out.go:177] * [ha-089400] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:47:27.505771    9012 notify.go:220] Checking for updates...
	I0408 18:47:27.506060    9012 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:47:27.508228    9012 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:47:27.511106    9012 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:47:27.514151    9012 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:47:27.516928    9012 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:47:27.520636    9012 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 18:47:32.886054    9012 out.go:177] * Using the hyperv driver based on user configuration
	I0408 18:47:32.890031    9012 start.go:297] selected driver: hyperv
	I0408 18:47:32.890031    9012 start.go:901] validating driver "hyperv" against <nil>
	I0408 18:47:32.890031    9012 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 18:47:32.938770    9012 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 18:47:32.940498    9012 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:47:32.940731    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:47:32.940731    9012 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 18:47:32.940731    9012 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 18:47:32.940863    9012 start.go:340] cluster config:
	{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker
CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthS
ock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:47:32.941292    9012 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:47:32.946232    9012 out.go:177] * Starting "ha-089400" primary control-plane node in "ha-089400" cluster
	I0408 18:47:32.948683    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:47:32.948880    9012 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 18:47:32.948951    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:47:32.949402    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:47:32.949432    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:47:32.949432    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:47:32.950460    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json: {Name:mk55ac39bf4944b017a16834787f25430b36f60e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:47:32.951496    9012 start.go:360] acquireMachinesLock for ha-089400: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:47:32.951496    9012 start.go:364] duration metric: took 0s to acquireMachinesLock for "ha-089400"
	I0408 18:47:32.951496    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID
:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:47:32.952123    9012 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 18:47:32.958880    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:47:32.958880    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:47:32.958880    9012 client.go:168] LocalClient.Create starting
	I0408 18:47:32.958880    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:47:36.766280    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:47:36.766280    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:36.767283    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:47:38.244880    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:47:38.245436    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:38.245436    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:47:41.873272    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:47:41.873272    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:41.876631    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:47:42.494633    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:47:42.735647    9012 main.go:141] libmachine: Creating VM...
	I0408 18:47:42.735647    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:47:45.673500    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:47:45.674094    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:45.674195    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:47:45.674260    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:47:47.467195    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:47:47.467239    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:47.467239    9012 main.go:141] libmachine: Creating VHD
	I0408 18:47:47.467334    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:47:51.283459    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 5407168E-05F8-4CF3-BBC7-354486FDC2A3
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:47:51.283577    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:51.283647    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:47:51.283647    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:47:51.299198    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:47:54.484434    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:47:54.484434    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:54.485009    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd' -SizeBytes 20000MB
	I0408 18:47:57.078641    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:47:57.079379    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:57.079466    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:48:00.792875    9012 main.go:141] libmachine: [stdout =====>] : 
	Name      State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----      ----- ----------- ----------------- ------   ------             -------
	ha-089400 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:48:00.792875    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:00.793874    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400 -DynamicMemoryEnabled $false
	I0408 18:48:03.041850    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:03.041850    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:03.042814    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400 -Count 2
	I0408 18:48:05.222593    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:05.222593    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:05.222721    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\boot2docker.iso'
	I0408 18:48:07.779036    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:07.779268    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:07.779343    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd'
	I0408 18:48:10.435215    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:10.435918    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:10.435918    9012 main.go:141] libmachine: Starting VM...
	I0408 18:48:10.435989    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:13.587424    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:15.891090    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:15.891195    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:15.891230    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:18.472485    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:18.472485    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:19.473553    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:21.708758    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:21.709558    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:21.709616    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:24.274981    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:24.275051    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:25.276091    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:30.056720    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:30.057027    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:31.058205    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:35.846949    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:35.846949    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:36.847323    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:39.098368    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:39.098368    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:39.098898    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:41.725860    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:41.726081    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:41.726081    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:43.895113    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:43.895497    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:43.895497    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:48:43.895707    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:46.093331    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:46.094142    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:46.094142    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:48.656402    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:48.656402    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:48.663852    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:48.680228    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:48.680228    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:48:48.823952    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:48:48.824058    9012 buildroot.go:166] provisioning hostname "ha-089400"
	I0408 18:48:48.824058    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:51.028511    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:51.028511    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:51.029433    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:53.577231    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:53.577231    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:53.583493    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:53.584187    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:53.584187    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400 && echo "ha-089400" | sudo tee /etc/hostname
	I0408 18:48:53.750398    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400
	
	I0408 18:48:53.750398    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:55.931754    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:55.931754    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:55.932759    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:58.535826    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:58.536778    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:58.545702    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:58.546352    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:58.546352    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:48:58.699846    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:48:58.699927    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:48:58.699927    9012 buildroot.go:174] setting up certificates
	I0408 18:48:58.700018    9012 provision.go:84] configureAuth start
	I0408 18:48:58.700085    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:00.867466    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:00.867987    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:00.867987    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:05.591010    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:05.592086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:05.592227    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:08.128168    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:08.128168    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:08.128168    9012 provision.go:143] copyHostCerts
	I0408 18:49:08.128562    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:49:08.128562    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:49:08.128562    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:49:08.129685    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:49:08.131183    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:49:08.131651    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:49:08.131651    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:49:08.132035    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:49:08.133093    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:49:08.133239    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:49:08.133239    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:49:08.133239    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:49:08.134841    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400 san=[127.0.0.1 172.22.47.59 ha-089400 localhost minikube]
	I0408 18:49:08.456027    9012 provision.go:177] copyRemoteCerts
	I0408 18:49:08.466786    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:49:08.466786    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:10.626994    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:10.627069    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:10.627069    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:13.170636    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:13.170865    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:13.171094    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:13.283620    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.8167959s)
	I0408 18:49:13.283620    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:49:13.284189    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:49:13.332377    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:49:13.333119    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1200 bytes)
	I0408 18:49:13.381101    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:49:13.381101    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0408 18:49:13.428784    9012 provision.go:87] duration metric: took 14.7286504s to configureAuth
	I0408 18:49:13.428894    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:49:13.429519    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:49:13.429519    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:15.586051    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:15.587045    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:15.587045    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:18.135339    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:18.136037    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:18.142374    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:18.143172    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:18.143172    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:49:18.291121    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:49:18.291204    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:49:18.291435    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:49:18.291529    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:20.437874    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:20.437874    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:20.438671    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:22.981428    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:22.981905    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:22.987045    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:22.987876    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:22.987876    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:49:23.153928    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:49:23.153928    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:25.272950    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:25.273112    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:25.273112    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:27.828587    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:27.828587    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:27.834490    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:27.835224    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:27.835376    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:49:30.064090    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:49:30.064090    9012 machine.go:96] duration metric: took 46.1682316s to provisionDockerMachine
	I0408 18:49:30.064090    9012 client.go:171] duration metric: took 1m57.1042998s to LocalClient.Create
	I0408 18:49:30.064090    9012 start.go:167] duration metric: took 1m57.1042998s to libmachine.API.Create "ha-089400"
	I0408 18:49:30.064385    9012 start.go:293] postStartSetup for "ha-089400" (driver="hyperv")
	I0408 18:49:30.064385    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:49:30.078741    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:49:30.078741    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:32.248301    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:32.248301    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:32.248860    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:34.800855    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:34.800855    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:34.801907    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:34.914977    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8361978s)
	I0408 18:49:34.926823    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:49:34.935556    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:49:34.935713    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:49:34.936489    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:49:34.938022    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:49:34.938117    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:49:34.951341    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:49:34.969357    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:49:35.014168    9012 start.go:296] duration metric: took 4.9497105s for postStartSetup
	I0408 18:49:35.017999    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:37.171114    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:37.172073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:37.172256    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:39.773538    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:39.773538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:39.774394    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:49:39.777377    9012 start.go:128] duration metric: took 2m6.8242664s to createHost
	I0408 18:49:39.777377    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:41.938801    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:41.938801    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:41.938964    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:44.499758    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:44.499758    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:44.506317    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:44.506550    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:44.506550    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:49:44.637495    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138184.642643337
	
	I0408 18:49:44.637663    9012 fix.go:216] guest clock: 1744138184.642643337
	I0408 18:49:44.637663    9012 fix.go:229] Guest: 2025-04-08 18:49:44.642643337 +0000 UTC Remote: 2025-04-08 18:49:39.7773774 +0000 UTC m=+132.483166301 (delta=4.865265937s)
	I0408 18:49:44.637998    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:49.337665    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:49.337665    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:49.344623    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:49.344623    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:49.344623    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138184
	I0408 18:49:49.501380    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:49:44 UTC 2025
	
	I0408 18:49:49.501380    9012 fix.go:236] clock set: Tue Apr  8 18:49:44 UTC 2025
	 (err=<nil>)
	I0408 18:49:49.501380    9012 start.go:83] releasing machines lock for "ha-089400", held for 2m16.5488195s
	I0408 18:49:49.502025    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:51.719308    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:51.720482    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:51.720579    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:54.254447    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:54.254447    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:54.259662    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:49:54.260310    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:54.272717    9012 ssh_runner.go:195] Run: cat /version.json
	I0408 18:49:54.272717    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:56.508785    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:59.226023    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:59.227038    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:59.227156    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:59.255668    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:59.256086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:59.256277    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:59.321952    9012 ssh_runner.go:235] Completed: cat /version.json: (5.0491947s)
	I0408 18:49:59.334260    9012 ssh_runner.go:195] Run: systemctl --version
	I0408 18:49:59.339399    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.0796961s)
	W0408 18:49:59.339399    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:49:59.354786    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	W0408 18:49:59.365178    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:49:59.375623    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:49:59.404462    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:49:59.404524    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:49:59.404524    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:49:59.452294    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 18:49:59.457495    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:49:59.457495    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:49:59.486350    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:49:59.507038    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:49:59.517686    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:49:59.547953    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:49:59.576688    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:49:59.609190    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:49:59.638716    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:49:59.668385    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:49:59.699189    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:49:59.729769    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:49:59.760311    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:49:59.780259    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:49:59.790262    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:49:59.827955    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:49:59.854821    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:00.057198    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:50:00.088370    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:50:00.098193    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:50:00.135125    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:50:00.167126    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:50:00.210084    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:50:00.244775    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:50:00.279654    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:50:00.341556    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:50:00.368587    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:50:00.414537    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:50:00.434810    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:50:00.453295    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:50:00.493338    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:50:00.692389    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:50:00.873048    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:50:00.873217    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:50:00.915246    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:01.101816    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:50:03.673068    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.571231s)
	I0408 18:50:03.684170    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:50:03.723824    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:50:03.765446    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:50:03.961805    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:50:04.169850    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:04.366467    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:50:04.408914    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:50:04.442222    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:04.633600    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:50:04.734114    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:50:04.745637    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:50:04.757091    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:50:04.768809    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:50:04.786302    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:50:04.839171    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:50:04.848578    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:50:04.890124    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:50:04.922882    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:50:04.923155    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:50:04.930572    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:50:04.930572    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:50:04.941580    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:50:04.947683    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:50:04.980139    9012 kubeadm.go:883] updating cluster {Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespac
e:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mo
untUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 18:50:04.980139    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:50:04.988140    9012 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 18:50:05.013782    9012 docker.go:689] Got preloaded images: 
	I0408 18:50:05.013782    9012 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 18:50:05.026405    9012 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 18:50:05.056599    9012 ssh_runner.go:195] Run: which lz4
	I0408 18:50:05.064079    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 18:50:05.075283    9012 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 18:50:05.082136    9012 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 18:50:05.082136    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 18:50:06.857938    9012 docker.go:653] duration metric: took 1.7934577s to copy over tarball
	I0408 18:50:06.868970    9012 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 18:50:15.385073    9012 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (8.5150544s)
	I0408 18:50:15.385073    9012 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 18:50:15.447833    9012 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 18:50:15.470230    9012 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 18:50:15.514355    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:15.713001    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:50:18.866939    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1539127s)
	I0408 18:50:18.876869    9012 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 18:50:18.905603    9012 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 18:50:18.905680    9012 cache_images.go:84] Images are preloaded, skipping loading
	I0408 18:50:18.905744    9012 kubeadm.go:934] updating node { 172.22.47.59 8443 v1.32.2 docker true true} ...
	I0408 18:50:18.906272    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.47.59
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:50:18.918677    9012 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 18:50:18.984498    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:50:18.984546    9012 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 18:50:18.984546    9012 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 18:50:18.984546    9012 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.47.59 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-089400 NodeName:ha-089400 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.47.59"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.47.59 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/ma
nifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 18:50:18.984546    9012 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.47.59
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "ha-089400"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.47.59"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.47.59"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 18:50:18.984546    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:50:18.996646    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:50:19.021375    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:50:19.021646    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:50:19.033800    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:50:19.048707    9012 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 18:50:19.059700    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0408 18:50:19.078525    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (308 bytes)
	I0408 18:50:19.113741    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:50:19.142569    9012 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2287 bytes)
	I0408 18:50:19.171509    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0408 18:50:19.215137    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:50:19.221091    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:50:19.250736    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:19.429220    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:50:19.456418    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.47.59
	I0408 18:50:19.456418    9012 certs.go:194] generating shared ca certs ...
	I0408 18:50:19.456418    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:19.457222    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:50:19.457741    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:50:19.457986    9012 certs.go:256] generating profile certs ...
	I0408 18:50:19.458477    9012 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:50:19.458477    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt with IP's: []
	I0408 18:50:20.001097    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt ...
	I0408 18:50:20.001097    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt: {Name:mk091a720dfa0c60306c7ae51fa6699a7b88e9c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.003307    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key ...
	I0408 18:50:20.003307    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key: {Name:mkfd1fda470bd79cdab55ffdf1b4b18ba5f62b7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.004976    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b
	I0408 18:50:20.004976    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.47.254]
	I0408 18:50:20.085546    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b ...
	I0408 18:50:20.085546    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b: {Name:mk01f3daa70ba5db3297bb03014cd9da2298fbac Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.086146    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b ...
	I0408 18:50:20.086146    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b: {Name:mk98c4e7ff1a4c18dc165d12fc3c5becc04d03d0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.087291    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:50:20.105505    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:50:20.107835    9012 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:50:20.108200    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt with IP's: []
	I0408 18:50:20.196099    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt ...
	I0408 18:50:20.197136    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt: {Name:mk77d14bfc20c02c360eb657f882d9f3b17479e3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.198546    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key ...
	I0408 18:50:20.198546    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key: {Name:mkad206614b853ee3175ac3234f6ca59f16f8f18 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.199044    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:50:20.200088    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:50:20.200335    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:50:20.212266    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:50:20.213367    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:50:20.213367    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:50:20.213367    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:50:20.214622    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:50:20.214951    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:50:20.215356    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:50:20.215668    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:50:20.216253    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.216301    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.216555    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:20.216782    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:50:20.264741    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:50:20.309146    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:50:20.354848    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:50:20.409399    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 18:50:20.462267    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:50:20.502704    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:50:20.554355    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:50:20.599417    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:50:20.644510    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:50:20.687024    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:50:20.730482    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 18:50:20.771269    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:50:20.790402    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:50:20.818848    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.826653    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.840467    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.859992    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:50:20.892550    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:50:20.924269    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.930952    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.939950    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.957703    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:50:20.987944    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:50:21.021785    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.029062    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.040161    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.065815    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:50:21.099444    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:50:21.105805    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:50:21.106098    9012 kubeadm.go:392] StartCluster: {Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:d
efault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mount
UID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:50:21.115691    9012 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 18:50:21.153086    9012 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 18:50:21.184578    9012 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 18:50:21.214637    9012 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 18:50:21.230918    9012 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 18:50:21.231008    9012 kubeadm.go:157] found existing configuration files:
	
	I0408 18:50:21.243090    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 18:50:21.267695    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 18:50:21.278822    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 18:50:21.313205    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 18:50:21.329095    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 18:50:21.340305    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 18:50:21.367959    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 18:50:21.384560    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 18:50:21.397334    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 18:50:21.428112    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 18:50:21.446390    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 18:50:21.458024    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 18:50:21.475173    9012 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 18:50:21.935258    9012 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 18:50:36.267696    9012 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 18:50:36.267844    9012 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 18:50:36.268242    9012 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 18:50:36.268271    9012 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 18:50:36.268271    9012 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 18:50:36.268828    9012 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 18:50:36.271286    9012 out.go:235]   - Generating certificates and keys ...
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 18:50:36.272685    9012 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 18:50:36.272858    9012 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 18:50:36.273046    9012 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 18:50:36.273098    9012 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-089400 localhost] and IPs [172.22.47.59 127.0.0.1 ::1]
	I0408 18:50:36.273098    9012 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 18:50:36.273665    9012 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-089400 localhost] and IPs [172.22.47.59 127.0.0.1 ::1]
	I0408 18:50:36.273820    9012 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 18:50:36.273918    9012 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 18:50:36.273976    9012 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 18:50:36.274573    9012 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 18:50:36.274698    9012 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 18:50:36.274811    9012 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 18:50:36.274921    9012 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 18:50:36.274921    9012 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 18:50:36.280848    9012 out.go:235]   - Booting up control plane ...
	I0408 18:50:36.280848    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 18:50:36.281462    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 18:50:36.281462    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 18:50:36.282025    9012 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 18:50:36.282229    9012 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 18:50:36.282278    9012 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 18:50:36.282278    9012 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 18:50:36.282797    9012 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00197679s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [api-check] The API server is healthy after 8.003425301s
	I0408 18:50:36.283479    9012 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 18:50:36.283479    9012 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 18:50:36.283479    9012 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 18:50:36.284084    9012 kubeadm.go:310] [mark-control-plane] Marking the node ha-089400 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 18:50:36.284620    9012 kubeadm.go:310] [bootstrap-token] Using token: ixg5xa.82wpmzozhmishmjw
	I0408 18:50:36.287677    9012 out.go:235]   - Configuring RBAC rules ...
	I0408 18:50:36.287853    9012 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 18:50:36.288049    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 18:50:36.288049    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 18:50:36.288713    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 18:50:36.288713    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 18:50:36.289233    9012 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 18:50:36.289439    9012 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 18:50:36.289439    9012 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 18:50:36.289439    9012 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 18:50:36.289439    9012 kubeadm.go:310] 
	I0408 18:50:36.289439    9012 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 18:50:36.289969    9012 kubeadm.go:310] 
	I0408 18:50:36.290013    9012 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 18:50:36.290013    9012 kubeadm.go:310] 
	I0408 18:50:36.290013    9012 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 18:50:36.290013    9012 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 18:50:36.290013    9012 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 18:50:36.290532    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 18:50:36.290661    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 18:50:36.290661    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 18:50:36.291192    9012 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 18:50:36.291385    9012 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 18:50:36.291450    9012 kubeadm.go:310] 
	I0408 18:50:36.291450    9012 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 18:50:36.291450    9012 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 18:50:36.291975    9012 kubeadm.go:310] 
	I0408 18:50:36.292098    9012 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token ixg5xa.82wpmzozhmishmjw \
	I0408 18:50:36.292232    9012 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 18:50:36.292232    9012 kubeadm.go:310] 	--control-plane 
	I0408 18:50:36.292232    9012 kubeadm.go:310] 
	I0408 18:50:36.292232    9012 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 18:50:36.292232    9012 kubeadm.go:310] 
	I0408 18:50:36.292808    9012 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token ixg5xa.82wpmzozhmishmjw \
	I0408 18:50:36.292961    9012 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 18:50:36.292961    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:50:36.292961    9012 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 18:50:36.297982    9012 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 18:50:36.312878    9012 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 18:50:36.319888    9012 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 18:50:36.319888    9012 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 18:50:36.368213    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 18:50:37.011649    9012 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 18:50:37.025776    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:37.025776    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400 minikube.k8s.io/updated_at=2025_04_08T18_50_37_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=true
	I0408 18:50:37.041765    9012 ops.go:34] apiserver oom_adj: -16
	I0408 18:50:37.256277    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:37.756146    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:38.255738    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:38.756244    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:39.255970    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:39.756636    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:40.253299    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:40.410128    9012 kubeadm.go:1113] duration metric: took 3.3982594s to wait for elevateKubeSystemPrivileges
	I0408 18:50:40.410128    9012 kubeadm.go:394] duration metric: took 19.3038762s to StartCluster
	I0408 18:50:40.410128    9012 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:40.410128    9012 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:50:40.412142    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:40.413135    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 18:50:40.413135    9012 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:50:40.413135    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:50:40.413135    9012 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 18:50:40.413135    9012 addons.go:69] Setting storage-provisioner=true in profile "ha-089400"
	I0408 18:50:40.413135    9012 addons.go:238] Setting addon storage-provisioner=true in "ha-089400"
	I0408 18:50:40.413135    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:50:40.413135    9012 addons.go:69] Setting default-storageclass=true in profile "ha-089400"
	I0408 18:50:40.414139    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:50:40.414139    9012 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-089400"
	I0408 18:50:40.414139    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:40.415143    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:40.561974    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 18:50:40.931001    9012 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 18:50:42.810264    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:42.810804    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:42.812237    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:50:42.813438    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 18:50:42.814459    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:42.814459    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:42.816934    9012 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 18:50:42.816934    9012 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 18:50:42.817497    9012 addons.go:238] Setting addon default-storageclass=true in "ha-089400"
	I0408 18:50:42.817497    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:50:42.818473    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:42.820482    9012 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 18:50:42.820482    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 18:50:42.820482    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:45.323927    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:45.324004    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:45.324054    9012 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 18:50:45.324149    9012 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 18:50:45.324219    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:45.337850    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:45.337938    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:45.338100    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:50:47.696263    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:47.696404    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:47.696404    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:50:48.133507    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:50:48.134449    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:48.134714    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:50:48.293990    9012 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 18:50:50.388499    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:50:50.389442    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:50.389796    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:50:50.542944    9012 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 18:50:50.729333    9012 round_trippers.go:470] GET https://172.22.47.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 18:50:50.729430    9012 round_trippers.go:476] Request Headers:
	I0408 18:50:50.729430    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:50:50.729430    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:50:50.742651    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:50:50.743384    9012 round_trippers.go:470] PUT https://172.22.47.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 18:50:50.743437    9012 round_trippers.go:476] Request Headers:
	I0408 18:50:50.743437    9012 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 18:50:50.743437    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:50:50.743437    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:50:50.757119    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:50:50.760493    9012 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 18:50:50.763250    9012 addons.go:514] duration metric: took 10.350032s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 18:50:50.764232    9012 start.go:246] waiting for cluster config update ...
	I0408 18:50:50.764232    9012 start.go:255] writing updated cluster config ...
	I0408 18:50:50.766224    9012 out.go:201] 
	I0408 18:50:50.782463    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:50:50.782707    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:50:50.788640    9012 out.go:177] * Starting "ha-089400-m02" control-plane node in "ha-089400" cluster
	I0408 18:50:50.791459    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:50:50.791616    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:50:50.791994    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:50:50.791994    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:50:50.791994    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:50:50.800445    9012 start.go:360] acquireMachinesLock for ha-089400-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:50:50.800445    9012 start.go:364] duration metric: took 0s to acquireMachinesLock for "ha-089400-m02"
	I0408 18:50:50.800445    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mo
unt:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:50:50.801452    9012 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 18:50:50.805459    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:50:50.805459    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:50:50.805459    9012 client.go:168] LocalClient.Create starting
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:50:50.807438    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:50:50.807438    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:50:50.807438    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:50:52.829281    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:50:52.829281    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:52.829572    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:50:54.574685    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:50:54.574685    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:54.575073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:50:56.062297    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:50:56.062297    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:56.063134    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:50:59.740300    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:50:59.740494    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:59.743376    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:51:00.348807    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:51:00.699337    9012 main.go:141] libmachine: Creating VM...
	I0408 18:51:00.699337    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:51:03.778696    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:51:03.779083    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:03.779492    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:51:03.779568    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:05.615031    9012 main.go:141] libmachine: Creating VHD
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:51:09.566685    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : F69FC23E-1AA7-435D-8A7D-CA55C9612E7E
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:51:09.567538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:09.567622    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:51:09.567622    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:51:09.581618    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:51:12.834819    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:12.835843    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:12.835887    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd' -SizeBytes 20000MB
	I0408 18:51:15.455884    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:15.456251    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:15.456308    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:51:19.171199    9012 main.go:141] libmachine: [stdout =====>] : 
	Name          State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----          ----- ----------- ----------------- ------   ------             -------
	ha-089400-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:51:19.171294    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:19.171437    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400-m02 -DynamicMemoryEnabled $false
	I0408 18:51:21.471292    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:21.471711    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:21.471711    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400-m02 -Count 2
	I0408 18:51:23.741231    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:23.741231    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:23.741773    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\boot2docker.iso'
	I0408 18:51:26.337388    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:26.337388    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:26.338383    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd'
	I0408 18:51:29.039954    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:29.039954    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:29.040577    9012 main.go:141] libmachine: Starting VM...
	I0408 18:51:29.040577    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400-m02
	I0408 18:51:32.215515    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:32.215834    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:32.215834    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:51:32.215914    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:34.610050    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:34.610050    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:34.611104    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:37.194192    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:37.194455    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:38.194811    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:40.524583    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:40.524583    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:40.525378    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:43.081997    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:43.081997    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:44.083954    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:46.364842    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:46.364961    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:46.365041    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:48.933872    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:48.933872    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:49.934148    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:52.277229    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:52.277229    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:52.277922    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:54.870878    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:54.870878    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:55.871599    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:58.140324    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:58.140324    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:58.140758    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:00.779446    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:00.779446    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:00.780165    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:02.952796    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:02.953683    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:02.953683    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:52:02.953899    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:05.160496    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:05.160496    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:05.161192    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:07.800591    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:07.800591    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:07.807136    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:07.824300    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:07.824300    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:52:07.964932    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:52:07.965475    9012 buildroot.go:166] provisioning hostname "ha-089400-m02"
	I0408 18:52:07.965475    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:10.158538    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:10.158538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:10.159403    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:12.742532    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:12.742532    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:12.749219    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:12.749540    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:12.749540    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400-m02 && echo "ha-089400-m02" | sudo tee /etc/hostname
	I0408 18:52:12.922643    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400-m02
	
	I0408 18:52:12.922830    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:15.084598    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:15.084598    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:15.085395    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:17.657739    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:17.657803    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:17.663672    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:17.664422    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:17.664422    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:52:17.820320    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:52:17.820320    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:52:17.820320    9012 buildroot.go:174] setting up certificates
	I0408 18:52:17.820854    9012 provision.go:84] configureAuth start
	I0408 18:52:17.820890    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:20.037843    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:20.038712    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:20.038712    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:24.966902    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:24.967403    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:24.967468    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:27.637272    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:27.637272    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:27.637272    9012 provision.go:143] copyHostCerts
	I0408 18:52:27.637984    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:52:27.638347    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:52:27.638347    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:52:27.638347    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:52:27.639959    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:52:27.639959    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:52:27.639959    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:52:27.640630    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:52:27.641444    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:52:27.641444    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:52:27.641444    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:52:27.642069    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:52:27.643419    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400-m02 san=[127.0.0.1 172.22.34.212 ha-089400-m02 localhost minikube]
	I0408 18:52:27.975361    9012 provision.go:177] copyRemoteCerts
	I0408 18:52:27.986600    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:52:27.986600    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:30.225586    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:30.225586    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:30.225656    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:32.846192    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:32.846192    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:32.846824    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:52:32.960996    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.9743566s)
	I0408 18:52:32.961080    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:52:32.961171    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:52:33.010174    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:52:33.010174    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1208 bytes)
	I0408 18:52:33.060075    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:52:33.060075    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0408 18:52:33.114525    9012 provision.go:87] duration metric: took 15.2935482s to configureAuth
	I0408 18:52:33.114525    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:52:33.115172    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:52:33.115396    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:35.305726    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:35.305726    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:35.305916    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:37.916421    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:37.916421    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:37.921886    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:37.922775    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:37.922775    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:52:38.063175    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:52:38.063175    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:52:38.063175    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:52:38.063175    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:40.261159    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:40.261885    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:40.261967    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:42.883372    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:42.883372    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:42.889966    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:42.890668    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:42.890668    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.47.59"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:52:43.065595    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.47.59
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:52:43.066170    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:45.286392    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:45.286392    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:45.287289    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:47.895242    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:47.895242    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:47.902861    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:47.903624    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:47.903624    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:52:50.193404    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:52:50.193477    9012 machine.go:96] duration metric: took 47.2394153s to provisionDockerMachine
	I0408 18:52:50.193477    9012 client.go:171] duration metric: took 1m59.3870628s to LocalClient.Create
	I0408 18:52:50.193571    9012 start.go:167] duration metric: took 1m59.3870628s to libmachine.API.Create "ha-089400"
	I0408 18:52:50.193597    9012 start.go:293] postStartSetup for "ha-089400-m02" (driver="hyperv")
	I0408 18:52:50.193597    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:52:50.206370    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:52:50.206370    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:54.966454    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:54.966877    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:54.967115    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:52:55.079170    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8727163s)
	I0408 18:52:55.091782    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:52:55.098278    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:52:55.098442    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:52:55.098890    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:52:55.099859    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:52:55.099926    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:52:55.112480    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:52:55.130807    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:52:55.177794    9012 start.go:296] duration metric: took 4.9840997s for postStartSetup
	I0408 18:52:55.180992    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:57.331126    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:57.331613    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:57.331613    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:59.879853    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:59.879853    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:59.881016    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:52:59.883357    9012 start.go:128] duration metric: took 2m9.0808718s to createHost
	I0408 18:52:59.883499    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:02.058137    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:02.059154    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:02.059154    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:04.665681    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:04.666224    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:04.672236    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:53:04.673123    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:53:04.673123    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:53:04.807349    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138384.811839667
	
	I0408 18:53:04.807349    9012 fix.go:216] guest clock: 1744138384.811839667
	I0408 18:53:04.807349    9012 fix.go:229] Guest: 2025-04-08 18:53:04.811839667 +0000 UTC Remote: 2025-04-08 18:52:59.883499 +0000 UTC m=+332.587692001 (delta=4.928340667s)
	I0408 18:53:04.807444    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:06.996624    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:06.997557    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:06.997649    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:09.627263    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:09.628114    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:09.634268    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:53:09.635011    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:53:09.635011    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138384
	I0408 18:53:09.796868    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:53:04 UTC 2025
	
	I0408 18:53:09.796868    9012 fix.go:236] clock set: Tue Apr  8 18:53:04 UTC 2025
	 (err=<nil>)
	I0408 18:53:09.796868    9012 start.go:83] releasing machines lock for "ha-089400-m02", held for 2m18.9953105s
	I0408 18:53:09.797172    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:14.698789    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:14.698789    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:14.702776    9012 out.go:177] * Found network options:
	I0408 18:53:14.705848    9012 out.go:177]   - NO_PROXY=172.22.47.59
	W0408 18:53:14.708025    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:53:14.710106    9012 out.go:177]   - NO_PROXY=172.22.47.59
	W0408 18:53:14.713348    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:53:14.714576    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:53:14.717319    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:53:14.717319    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:14.726311    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 18:53:14.726311    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:16.996744    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:16.996807    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:19.758008    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:19.758167    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:19.758167    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:53:19.784461    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:19.784461    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:19.784669    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:53:19.851171    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1337293s)
	W0408 18:53:19.851171    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:53:19.886095    9012 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.1597421s)
	W0408 18:53:19.886095    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:53:19.898796    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:53:19.928255    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:53:19.928255    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:53:19.928478    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:53:19.975658    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 18:53:20.009295    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:53:20.030071    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	W0408 18:53:20.031054    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:53:20.031054    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:53:20.041542    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:53:20.075859    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:53:20.107245    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:53:20.140060    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:53:20.169880    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:53:20.203679    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:53:20.234319    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:53:20.263348    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:53:20.293136    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:53:20.310909    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:53:20.323813    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:53:20.358562    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:53:20.385889    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:20.602442    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:53:20.642238    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:53:20.654955    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:53:20.688363    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:53:20.720343    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:53:20.763341    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:53:20.799250    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:53:20.836038    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:53:20.893241    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:53:20.915790    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:53:20.966631    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:53:20.986380    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:53:21.002898    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:53:21.045328    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:53:21.248505    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:53:21.440253    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:53:21.440253    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:53:21.483982    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:21.669200    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:53:24.313290    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6440688s)
	I0408 18:53:24.327954    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:53:24.362931    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:53:24.401268    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:53:24.620390    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:53:24.831498    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:25.027238    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:53:25.070346    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:53:25.107147    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:25.309870    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:53:25.421414    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:53:25.434249    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:53:25.442491    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:53:25.454355    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:53:25.472376    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:53:25.530911    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:53:25.541418    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:53:25.591357    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:53:25.628428    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:53:25.631221    9012 out.go:177]   - env NO_PROXY=172.22.47.59
	I0408 18:53:25.634064    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:53:25.641312    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:53:25.641312    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:53:25.650763    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:53:25.657404    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:53:25.678843    9012 mustload.go:65] Loading cluster: ha-089400
	I0408 18:53:25.679506    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:53:25.679705    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:27.857804    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:27.857804    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:27.858275    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:53:27.859100    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.34.212
	I0408 18:53:27.859100    9012 certs.go:194] generating shared ca certs ...
	I0408 18:53:27.859232    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:27.859621    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:53:27.860543    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:53:27.860775    9012 certs.go:256] generating profile certs ...
	I0408 18:53:27.861585    9012 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:53:27.861902    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4
	I0408 18:53:27.862062    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.34.212 172.22.47.254]
	I0408 18:53:28.151093    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 ...
	I0408 18:53:28.151093    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4: {Name:mke938a312a87a4d60be7a0c841b7b96f066f3e0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:28.152605    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4 ...
	I0408 18:53:28.152605    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4: {Name:mkaea1d23176606a41bfae74829fb46c1c1b82ca Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:28.153405    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:53:28.174325    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:53:28.175898    9012 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:53:28.176106    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:53:28.176240    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:53:28.176998    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:53:28.177354    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:53:28.177354    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:53:28.178439    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:53:28.178730    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:53:28.178893    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:53:28.178893    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:53:28.179793    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:53:28.179888    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:53:28.179952    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:53:28.179952    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:53:28.180816    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:53:28.180948    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:53:28.180948    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:53:28.181532    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:28.181887    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:53:28.182039    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:30.403804    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:30.404390    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:30.404390    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:33.000299    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:53:33.000299    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:33.000909    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:53:33.101584    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0408 18:53:33.110089    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0408 18:53:33.145086    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0408 18:53:33.151672    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0408 18:53:33.186697    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0408 18:53:33.193271    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0408 18:53:33.223666    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0408 18:53:33.229946    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0408 18:53:33.262262    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0408 18:53:33.270156    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0408 18:53:33.299929    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0408 18:53:33.306482    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0408 18:53:33.326586    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:53:33.374327    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:53:33.418580    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:53:33.466287    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:53:33.512485    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0408 18:53:33.559874    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:53:33.606637    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:53:33.659315    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:53:33.704637    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:53:33.752000    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:53:33.799479    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:53:33.847297    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0408 18:53:33.879497    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0408 18:53:33.911931    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0408 18:53:33.944293    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0408 18:53:33.975154    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0408 18:53:34.005701    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0408 18:53:34.035812    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0408 18:53:34.085951    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:53:34.107716    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:53:34.138099    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.145561    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.156715    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.180434    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:53:34.211793    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:53:34.245161    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.253685    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.264752    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.290058    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:53:34.324012    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:53:34.356307    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.363073    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.373709    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.396329    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:53:34.427012    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:53:34.434309    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:53:34.434309    9012 kubeadm.go:934] updating node {m02 172.22.34.212 8443 v1.32.2 docker true true} ...
	I0408 18:53:34.434855    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.34.212
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:53:34.434972    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:53:34.446276    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:53:34.471377    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:53:34.471521    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:53:34.482250    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:53:34.506426    9012 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.32.2: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.32.2': No such file or directory
	
	Initiating transfer...
	I0408 18:53:34.519007    9012 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.32.2
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl
	I0408 18:53:35.661266    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl -> /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:53:35.686805    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:53:35.693858    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubectl': No such file or directory
	I0408 18:53:35.693858    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl --> /var/lib/minikube/binaries/v1.32.2/kubectl (57323672 bytes)
	I0408 18:53:35.942804    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm -> /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:53:35.955081    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:53:35.971474    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubeadm: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubeadm': No such file or directory
	I0408 18:53:35.971474    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm --> /var/lib/minikube/binaries/v1.32.2/kubeadm (70942872 bytes)
	I0408 18:53:36.225260    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:53:36.303715    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet -> /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:53:36.314355    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:53:36.330893    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubelet: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubelet': No such file or directory
	I0408 18:53:36.330893    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet --> /var/lib/minikube/binaries/v1.32.2/kubelet (77406468 bytes)
	I0408 18:53:37.047075    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0408 18:53:37.073721    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0408 18:53:37.108848    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:53:37.140025    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0408 18:53:37.191089    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:53:37.200878    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:53:37.234891    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:37.431447    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:53:37.460745    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:53:37.461874    9012 start.go:317] joinCluster: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:def
ault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false M
ountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:53:37.462114    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0408 18:53:37.462114    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:39.662992    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:39.662992    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:39.664017    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:42.347902    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:53:42.348677    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:42.348677    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:53:42.859972    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0": (5.3977347s)
	I0408 18:53:42.860224    9012 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:53:42.860309    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token kqgs6j.q4ws2792ks88b89m --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m02 --control-plane --apiserver-advertise-address=172.22.34.212 --apiserver-bind-port=8443"
	I0408 18:54:23.022307    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token kqgs6j.q4ws2792ks88b89m --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m02 --control-plane --apiserver-advertise-address=172.22.34.212 --apiserver-bind-port=8443": (40.1616065s)
	I0408 18:54:23.022430    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0408 18:54:23.827125    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400-m02 minikube.k8s.io/updated_at=2025_04_08T18_54_23_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=false
	I0408 18:54:24.006525    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-089400-m02 node-role.kubernetes.io/control-plane:NoSchedule-
	I0408 18:54:24.177293    9012 start.go:319] duration metric: took 46.7150453s to joinCluster
	I0408 18:54:24.177500    9012 start.go:235] Will wait 6m0s for node &{Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:54:24.178359    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:54:24.182100    9012 out.go:177] * Verifying Kubernetes components...
	I0408 18:54:24.195483    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:54:24.557638    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:54:24.587442    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:54:24.588237    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0408 18:54:24.588237    9012 kubeadm.go:483] Overriding stale ClientConfig host https://172.22.47.254:8443 with https://172.22.47.59:8443
	I0408 18:54:24.589260    9012 node_ready.go:35] waiting up to 6m0s for node "ha-089400-m02" to be "Ready" ...
	I0408 18:54:24.589260    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:24.589260    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:24.589260    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:24.589260    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:24.611608    9012 round_trippers.go:581] Response Status: 200 OK in 22 milliseconds
	I0408 18:54:25.090289    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:25.090289    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:25.090289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:25.090289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:25.104766    9012 round_trippers.go:581] Response Status: 200 OK in 14 milliseconds
	I0408 18:54:25.589441    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:25.589441    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:25.589441    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:25.589441    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:25.598744    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:26.092857    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:26.092857    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:26.092857    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:26.092857    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:26.099814    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:26.590872    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:26.590872    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:26.590872    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:26.590872    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:26.595446    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:26.596484    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:27.090561    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:27.090561    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:27.090561    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:27.090561    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:27.097508    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:27.589624    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:27.589624    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:27.589624    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:27.589624    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:27.603086    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:54:28.090321    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:28.090321    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:28.090321    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:28.090321    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:28.099946    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:28.589628    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:28.589628    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:28.589628    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:28.589628    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:28.881039    9012 round_trippers.go:581] Response Status: 200 OK in 291 milliseconds
	I0408 18:54:28.881670    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:29.089605    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:29.090080    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:29.090080    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:29.090080    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:29.105866    9012 round_trippers.go:581] Response Status: 200 OK in 15 milliseconds
	I0408 18:54:29.589760    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:29.589760    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:29.589760    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:29.589760    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:29.594937    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:30.090212    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:30.090212    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:30.090212    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:30.090212    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:30.130855    9012 round_trippers.go:581] Response Status: 200 OK in 40 milliseconds
	I0408 18:54:30.590698    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:30.590698    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:30.590698    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:30.590698    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:30.595524    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:31.090135    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:31.090135    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:31.090135    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:31.090135    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:31.095959    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:31.096190    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:31.589728    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:31.590182    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:31.590289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:31.590289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:31.597113    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:32.089598    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:32.089598    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:32.089598    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:32.089598    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:32.096441    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:32.590029    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:32.590029    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:32.590083    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:32.590083    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:32.595155    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:33.090794    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:33.090874    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:33.090874    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:33.090874    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:33.099681    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:54:33.100480    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:33.590768    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:33.590816    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:33.590816    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:33.590877    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:33.595389    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:34.089983    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:34.090068    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:34.090068    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:34.090068    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:34.096670    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:34.590231    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:34.590231    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:34.590231    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:34.590231    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:34.595061    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:35.089973    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:35.089973    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:35.089973    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:35.089973    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:35.095889    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:35.590183    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:35.590183    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:35.590183    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:35.590183    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:35.595765    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:35.595999    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:36.089996    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:36.089996    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:36.089996    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:36.089996    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:36.095798    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:36.589484    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:36.589484    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:36.589484    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:36.589484    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:36.596471    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:37.090157    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:37.090232    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:37.090232    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:37.090232    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:37.095769    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:37.590533    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:37.590533    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:37.590533    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:37.590533    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:37.598562    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:37.599073    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:38.089788    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:38.089788    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:38.089788    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:38.089788    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:38.095397    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:38.589721    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:38.589721    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:38.589721    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:38.589721    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:38.595079    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:39.090103    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:39.090103    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:39.090103    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:39.090544    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:39.097202    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:39.591040    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:39.591114    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:39.591114    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:39.591114    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:39.600507    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:39.601066    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:40.090430    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:40.090430    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:40.090430    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:40.090430    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:40.096432    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:40.590886    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:40.590886    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:40.590886    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:40.590979    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:40.596101    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:41.090077    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:41.090077    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:41.090077    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:41.090077    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:41.098578    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:54:41.589430    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:41.590240    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:41.590240    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:41.590240    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:41.597360    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:42.091166    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:42.091237    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:42.091237    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:42.091237    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:42.097727    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:42.097727    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:42.589948    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:42.589948    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:42.589948    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:42.589948    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:42.595105    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:43.090478    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:43.090564    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:43.090640    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:43.090640    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:43.099930    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:43.590383    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:43.590383    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:43.590383    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:43.590383    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:43.596247    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:44.090544    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:44.090544    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:44.090544    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:44.090544    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:44.097507    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:44.097905    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:44.590903    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:44.590903    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:44.590903    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:44.591007    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:44.595808    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.090138    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.090260    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.090260    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.090260    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.095482    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:45.589489    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.589489    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.589489    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.589489    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.608446    9012 round_trippers.go:581] Response Status: 200 OK in 18 milliseconds
	I0408 18:54:45.608875    9012 node_ready.go:49] node "ha-089400-m02" has status "Ready":"True"
	I0408 18:54:45.608875    9012 node_ready.go:38] duration metric: took 21.0194473s for node "ha-089400-m02" to be "Ready" ...
	I0408 18:54:45.608972    9012 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:54:45.609103    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:45.609103    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.609103    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.609103    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.613715    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.616450    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.616978    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7f57d
	I0408 18:54:45.616978    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.616978    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.616978    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.621824    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.623363    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.623422    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.623467    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.623467    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.627217    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.627217    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.627217    9012 pod_ready.go:82] duration metric: took 10.7675ms for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.627217    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.627217    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7v5zn
	I0408 18:54:45.627217    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.627217    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.627217    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.631872    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.632852    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.632907    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.632907    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.632907    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.640893    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:45.641232    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.641306    9012 pod_ready.go:82] duration metric: took 14.0887ms for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.641306    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.641473    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400
	I0408 18:54:45.641473    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.641575    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.641575    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.645693    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.646561    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.646619    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.646619    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.646619    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.650348    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.650976    9012 pod_ready.go:93] pod "etcd-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.650976    9012 pod_ready.go:82] duration metric: took 9.583ms for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.651035    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.651131    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m02
	I0408 18:54:45.651131    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.651131    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.651208    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.655317    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.655911    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.655969    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.655969    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.655969    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.659487    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.659487    9012 pod_ready.go:93] pod "etcd-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.659487    9012 pod_ready.go:82] duration metric: took 8.4518ms for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.659487    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.790439    9012 request.go:661] Waited for 130.9515ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:54:45.790439    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:54:45.790439    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.790439    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.790439    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.796569    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:45.989803    9012 request.go:661] Waited for 192.5668ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.989803    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.989803    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.989803    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.989803    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.995353    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:45.995823    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.995851    9012 pod_ready.go:82] duration metric: took 336.3616ms for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.995899    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.190573    9012 request.go:661] Waited for 194.6308ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:54:46.190573    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:54:46.190573    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.190573    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.190573    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.197272    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:46.389964    9012 request.go:661] Waited for 192.1505ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:46.390524    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:46.390524    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.390524    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.390524    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.396007    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:46.396573    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:46.396573    9012 pod_ready.go:82] duration metric: took 400.6704ms for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.396573    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.590111    9012 request.go:661] Waited for 193.4139ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:54:46.590111    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:54:46.590111    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.590111    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.590111    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.596242    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:46.789598    9012 request.go:661] Waited for 192.9162ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:46.789598    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:46.789598    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.789598    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.789598    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.795177    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:46.795498    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:46.795570    9012 pod_ready.go:82] duration metric: took 398.9944ms for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.795570    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.989564    9012 request.go:661] Waited for 193.8921ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:54:46.989564    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:54:46.989564    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.989564    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.989564    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.995000    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.190059    9012 request.go:661] Waited for 194.6675ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.190059    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.190059    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.190059    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.190059    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.195664    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.196609    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.196665    9012 pod_ready.go:82] duration metric: took 401.092ms for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.196738    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.390057    9012 request.go:661] Waited for 193.2515ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:54:47.390555    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:54:47.390593    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.390593    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.390653    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.395742    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.589928    9012 request.go:661] Waited for 193.6789ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.589928    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.589928    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.589928    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.589928    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.596408    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:47.596946    9012 pod_ready.go:93] pod "kube-proxy-c4hjd" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.597013    9012 pod_ready.go:82] duration metric: took 400.2716ms for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.597013    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.790222    9012 request.go:661] Waited for 193.0608ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:54:47.790222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:54:47.790222    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.790222    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.790222    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.794932    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:47.990470    9012 request.go:661] Waited for 195.0792ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:47.990470    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:47.990470    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.990470    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.990470    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.996802    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:47.997277    9012 pod_ready.go:93] pod "kube-proxy-gf6wz" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.997277    9012 pod_ready.go:82] duration metric: took 400.2606ms for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.997394    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.190530    9012 request.go:661] Waited for 193.0561ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:54:48.191093    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:54:48.191093    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.191093    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.191093    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.196270    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:48.389627    9012 request.go:661] Waited for 192.8356ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:48.389627    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:48.389627    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.389627    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.389627    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.396969    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:48.397434    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:48.397509    9012 pod_ready.go:82] duration metric: took 400.1122ms for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.397509    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.590287    9012 request.go:661] Waited for 192.6414ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:54:48.590287    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:54:48.590287    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.590287    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.590287    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.597461    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:48.790108    9012 request.go:661] Waited for 192.2464ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:48.790108    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:48.790108    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.790108    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.790108    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.796253    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:48.797464    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:48.797558    9012 pod_ready.go:82] duration metric: took 400.0455ms for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.797558    9012 pod_ready.go:39] duration metric: took 3.1885608s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:54:48.797734    9012 api_server.go:52] waiting for apiserver process to appear ...
	I0408 18:54:48.808921    9012 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 18:54:48.837674    9012 api_server.go:72] duration metric: took 24.6597963s to wait for apiserver process to appear ...
	I0408 18:54:48.837766    9012 api_server.go:88] waiting for apiserver healthz status ...
	I0408 18:54:48.837832    9012 api_server.go:253] Checking apiserver healthz at https://172.22.47.59:8443/healthz ...
	I0408 18:54:48.854654    9012 api_server.go:279] https://172.22.47.59:8443/healthz returned 200:
	ok
	I0408 18:54:48.854820    9012 round_trippers.go:470] GET https://172.22.47.59:8443/version
	I0408 18:54:48.854820    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.854820    9012 round_trippers.go:480]     Accept: application/json, */*
	I0408 18:54:48.854820    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.856697    9012 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0408 18:54:48.856885    9012 api_server.go:141] control plane version: v1.32.2
	I0408 18:54:48.856885    9012 api_server.go:131] duration metric: took 19.1193ms to wait for apiserver health ...
	I0408 18:54:48.856954    9012 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 18:54:48.990014    9012 request.go:661] Waited for 133.0596ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:48.990014    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:48.990014    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.990014    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.990014    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.996971    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:48.999900    9012 system_pods.go:59] 17 kube-system pods found
	I0408 18:54:49.000002    9012 system_pods.go:61] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:54:49.000002    9012 system_pods.go:61] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:54:49.000002    9012 system_pods.go:61] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:54:49.000479    9012 system_pods.go:61] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:54:49.000536    9012 system_pods.go:74] duration metric: took 143.5809ms to wait for pod list to return data ...
	I0408 18:54:49.000536    9012 default_sa.go:34] waiting for default service account to be created ...
	I0408 18:54:49.190030    9012 request.go:661] Waited for 189.261ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:54:49.190030    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:54:49.190030    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.190030    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.190030    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.196049    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:49.196390    9012 default_sa.go:45] found service account: "default"
	I0408 18:54:49.196390    9012 default_sa.go:55] duration metric: took 195.8525ms for default service account to be created ...
	I0408 18:54:49.196390    9012 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 18:54:49.390214    9012 request.go:661] Waited for 193.6791ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:49.390214    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:49.390214    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.390214    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.390214    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.395692    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:49.398072    9012 system_pods.go:86] 17 kube-system pods found
	I0408 18:54:49.398072    9012 system_pods.go:89] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:54:49.398206    9012 system_pods.go:126] duration metric: took 201.8142ms to wait for k8s-apps to be running ...
	I0408 18:54:49.398275    9012 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 18:54:49.409314    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:54:49.434819    9012 system_svc.go:56] duration metric: took 36.5443ms WaitForService to wait for kubelet
	I0408 18:54:49.434900    9012 kubeadm.go:582] duration metric: took 25.2570168s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:54:49.434945    9012 node_conditions.go:102] verifying NodePressure condition ...
	I0408 18:54:49.590332    9012 request.go:661] Waited for 155.3278ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes
	I0408 18:54:49.590332    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes
	I0408 18:54:49.590332    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.590332    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.590332    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.597343    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:49.597343    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:54:49.597343    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:54:49.597343    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:54:49.597343    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:54:49.597343    9012 node_conditions.go:105] duration metric: took 162.3966ms to run NodePressure ...
	I0408 18:54:49.597343    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:54:49.597343    9012 start.go:255] writing updated cluster config ...
	I0408 18:54:49.601343    9012 out.go:201] 
	I0408 18:54:49.628374    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:54:49.628732    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:54:49.639624    9012 out.go:177] * Starting "ha-089400-m03" control-plane node in "ha-089400" cluster
	I0408 18:54:49.644132    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:54:49.644576    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:54:49.644794    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:54:49.645069    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:54:49.645280    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:54:49.654517    9012 start.go:360] acquireMachinesLock for ha-089400-m03: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:54:49.655372    9012 start.go:364] duration metric: took 294.7µs to acquireMachinesLock for "ha-089400-m03"
	I0408 18:54:49.655372    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false insp
ektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror:
DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:54:49.655372    9012 start.go:125] createHost starting for "m03" (driver="hyperv")
	I0408 18:54:49.659242    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:54:49.659242    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:54:49.660234    9012 client.go:168] LocalClient.Create starting
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:54:49.661237    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:54:49.661237    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:54:49.661237    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:54:53.454949    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:54:53.455426    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:53.455522    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:54:54.994058    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:54:54.994058    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:54.994659    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:54:58.885321    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:54:58.885610    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:58.887874    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:54:59.381732    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:54:59.981263    9012 main.go:141] libmachine: Creating VM...
	I0408 18:54:59.981263    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:55:02.984454    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:55:02.984907    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:02.984907    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:55:02.984907    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:55:04.789345    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:55:04.789647    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:04.789647    9012 main.go:141] libmachine: Creating VHD
	I0408 18:55:04.789740    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:55:08.599487    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 92E34E2C-7C02-4340-AA80-28FCC82AAE37
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:55:08.599628    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:08.599628    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:55:08.599628    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:55:08.613873    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:55:11.860797    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:11.860797    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:11.861881    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd' -SizeBytes 20000MB
	I0408 18:55:14.473440    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:14.474496    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:14.474551    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:55:18.203349    9012 main.go:141] libmachine: [stdout =====>] : 
	Name          State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----          ----- ----------- ----------------- ------   ------             -------
	ha-089400-m03 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:55:18.203522    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:18.203522    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400-m03 -DynamicMemoryEnabled $false
	I0408 18:55:20.572148    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:20.572303    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:20.572303    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400-m03 -Count 2
	I0408 18:55:22.816261    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:22.816261    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:22.817014    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\boot2docker.iso'
	I0408 18:55:25.486808    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:25.486808    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:25.486938    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd'
	I0408 18:55:28.228207    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:28.228699    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:28.228699    9012 main.go:141] libmachine: Starting VM...
	I0408 18:55:28.228699    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400-m03
	I0408 18:55:31.446225    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:31.446225    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:31.446225    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:55:31.446939    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:33.795918    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:33.795918    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:33.796775    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:36.382194    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:36.382285    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:37.383291    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:39.660623    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:39.660623    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:39.660764    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:42.327572    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:42.328082    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:43.328946    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:45.622469    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:45.622469    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:45.623158    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:48.239837    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:48.239837    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:49.240763    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:51.579977    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:51.580719    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:51.580719    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:54.183684    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:54.183684    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:55.184757    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:00.167685    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:00.167685    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:00.168199    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:02.364246    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:02.364718    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:02.364718    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:56:02.364817    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:04.585267    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:04.585649    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:04.585737    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:07.201924    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:07.202421    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:07.207326    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:07.208443    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:07.208524    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:56:07.338977    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:56:07.339039    9012 buildroot.go:166] provisioning hostname "ha-089400-m03"
	I0408 18:56:07.339110    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:12.198453    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:12.198816    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:12.204668    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:12.205304    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:12.205304    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400-m03 && echo "ha-089400-m03" | sudo tee /etc/hostname
	I0408 18:56:12.368874    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400-m03
	
	I0408 18:56:12.369426    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:14.557978    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:14.557978    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:14.559040    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:17.207842    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:17.207842    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:17.214498    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:17.215031    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:17.215031    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:56:17.349077    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:56:17.349077    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:56:17.349077    9012 buildroot.go:174] setting up certificates
	I0408 18:56:17.349077    9012 provision.go:84] configureAuth start
	I0408 18:56:17.349077    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:19.547597    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:19.547597    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:19.548399    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:24.366409    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:24.366409    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:24.367159    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:26.998174    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:26.998903    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:26.998903    9012 provision.go:143] copyHostCerts
	I0408 18:56:26.998903    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:56:26.998903    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:56:26.999514    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:56:26.999840    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:56:27.001714    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:56:27.001714    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:56:27.001714    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:56:27.002612    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:56:27.003736    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:56:27.003736    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:56:27.004257    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:56:27.004561    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:56:27.005991    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400-m03 san=[127.0.0.1 172.22.44.49 ha-089400-m03 localhost minikube]
	I0408 18:56:27.342248    9012 provision.go:177] copyRemoteCerts
	I0408 18:56:27.353317    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:56:27.353317    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:29.585019    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:29.585079    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:29.585079    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:32.253339    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:32.253499    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:32.253681    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:56:32.361404    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0080465s)
	I0408 18:56:32.361495    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:56:32.362055    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1208 bytes)
	I0408 18:56:32.409401    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:56:32.409848    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 18:56:32.465286    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:56:32.465331    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:56:32.518700    9012 provision.go:87] duration metric: took 15.1695014s to configureAuth
	I0408 18:56:32.518700    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:56:32.519677    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:56:32.519677    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:34.712907    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:34.713767    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:34.713767    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:37.328489    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:37.328489    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:37.337772    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:37.337772    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:37.337772    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:56:37.469355    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:56:37.469450    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:56:37.469653    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:56:37.469653    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:39.710086    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:39.710086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:39.710880    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:42.351298    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:42.351298    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:42.356476    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:42.357177    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:42.357177    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.47.59"
	Environment="NO_PROXY=172.22.47.59,172.22.34.212"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:56:42.511048    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.47.59
	Environment=NO_PROXY=172.22.47.59,172.22.34.212
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:56:42.511184    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:44.722275    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:44.722275    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:44.722448    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:47.340986    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:47.341166    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:47.346907    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:47.347594    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:47.347692    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:56:49.606889    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:56:49.606889    9012 machine.go:96] duration metric: took 47.2417942s to provisionDockerMachine
	I0408 18:56:49.606889    9012 client.go:171] duration metric: took 1m59.9456973s to LocalClient.Create
	I0408 18:56:49.606889    9012 start.go:167] duration metric: took 1m59.9466885s to libmachine.API.Create "ha-089400"
	I0408 18:56:49.606889    9012 start.go:293] postStartSetup for "ha-089400-m03" (driver="hyperv")
	I0408 18:56:49.606889    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:56:49.617885    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:56:49.617885    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:51.878845    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:51.878845    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:51.879642    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:54.536473    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:54.536473    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:54.536473    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:56:54.658672    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0407475s)
	I0408 18:56:54.671194    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:56:54.677972    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:56:54.677972    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:56:54.677972    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:56:54.678627    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:56:54.678627    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:56:54.690343    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:56:54.710088    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:56:54.756799    9012 start.go:296] duration metric: took 5.1498693s for postStartSetup
	I0408 18:56:54.760294    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:56.985684    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:56.985684    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:56.986556    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:59.645047    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:59.645047    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:59.645659    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:56:59.648173    9012 start.go:128] duration metric: took 2m9.9917629s to createHost
	I0408 18:56:59.648332    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:01.873316    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:01.873316    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:01.874193    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:04.481493    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:04.481720    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:04.490469    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:57:04.490469    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:57:04.490469    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:57:04.620763    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138624.626207453
	
	I0408 18:57:04.620867    9012 fix.go:216] guest clock: 1744138624.626207453
	I0408 18:57:04.620867    9012 fix.go:229] Guest: 2025-04-08 18:57:04.626207453 +0000 UTC Remote: 2025-04-08 18:56:59.6482541 +0000 UTC m=+572.350531301 (delta=4.977953353s)
	I0408 18:57:04.620867    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:09.456551    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:09.457024    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:09.463156    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:57:09.463246    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:57:09.463246    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138624
	I0408 18:57:09.606355    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:57:04 UTC 2025
	
	I0408 18:57:09.606413    9012 fix.go:236] clock set: Tue Apr  8 18:57:04 UTC 2025
	 (err=<nil>)
	I0408 18:57:09.606413    9012 start.go:83] releasing machines lock for "ha-089400-m03", held for 2m19.9499241s
	I0408 18:57:09.606695    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:11.791247    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:11.791247    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:11.791962    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:14.417713    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:14.418406    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:14.421857    9012 out.go:177] * Found network options:
	I0408 18:57:14.424668    9012 out.go:177]   - NO_PROXY=172.22.47.59,172.22.34.212
	W0408 18:57:14.428989    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.428989    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:57:14.431299    9012 out.go:177]   - NO_PROXY=172.22.47.59,172.22.34.212
	W0408 18:57:14.433514    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.433514    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.434759    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.434759    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:57:14.436661    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:57:14.436661    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:14.445998    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 18:57:14.445998    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:19.497145    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:19.497221    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:19.497317    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:57:19.529961    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:19.529961    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:19.529961    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:57:19.590254    9012 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.1442155s)
	W0408 18:57:19.590314    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:57:19.605254    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:57:19.610725    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1740233s)
	W0408 18:57:19.610725    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:57:19.639489    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:57:19.639489    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:57:19.639489    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:57:19.690681    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 18:57:19.723673    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:57:19.746185    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:57:19.757685    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	W0408 18:57:19.771977    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:57:19.771977    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:57:19.790985    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:57:19.821175    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:57:19.856008    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:57:19.888368    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:57:19.919485    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:57:19.954925    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:57:19.987391    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:57:20.018411    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:57:20.036318    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:57:20.053328    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:57:20.086862    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:57:20.118234    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:20.338038    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:57:20.372729    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:57:20.385964    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:57:20.422747    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:57:20.455789    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:57:20.506708    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:57:20.541861    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:57:20.578644    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:57:20.641182    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:57:20.668979    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:57:20.716621    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:57:20.734585    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:57:20.751983    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:57:20.798385    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:57:21.009212    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:57:21.211789    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:57:21.211789    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:57:21.257255    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:21.452670    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:57:24.049676    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.5969857s)
	I0408 18:57:24.062881    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:57:24.102032    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:57:24.138499    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:57:24.349941    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:57:24.574649    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:24.773591    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:57:24.812527    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:57:24.848960    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:25.060559    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:57:25.180808    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:57:25.192910    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:57:25.201786    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:57:25.215756    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:57:25.235084    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:57:25.302200    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:57:25.311559    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:57:25.358622    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:57:25.399691    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:57:25.402014    9012 out.go:177]   - env NO_PROXY=172.22.47.59
	I0408 18:57:25.404598    9012 out.go:177]   - env NO_PROXY=172.22.47.59,172.22.34.212
	I0408 18:57:25.406942    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:57:25.417053    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:57:25.417053    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:57:25.429955    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:57:25.437123    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:57:25.458585    9012 mustload.go:65] Loading cluster: ha-089400
	I0408 18:57:25.459419    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:57:25.459866    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:27.640199    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:27.640199    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:27.640199    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:57:27.641446    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.44.49
	I0408 18:57:27.641532    9012 certs.go:194] generating shared ca certs ...
	I0408 18:57:27.641532    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:27.642016    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:57:27.642584    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:57:27.643000    9012 certs.go:256] generating profile certs ...
	I0408 18:57:27.643527    9012 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:57:27.643614    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b
	I0408 18:57:27.643614    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.34.212 172.22.44.49 172.22.47.254]
	I0408 18:57:28.118513    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b ...
	I0408 18:57:28.118513    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b: {Name:mkab25feccbb47b663d6b8f66287792a65330ab5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:28.119480    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b ...
	I0408 18:57:28.119480    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b: {Name:mk5631061daee2ad7b71de1f1e07500b32b56953 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:28.121175    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:57:28.138147    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:57:28.140163    9012 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:57:28.143814    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:57:28.144834    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:57:28.145512    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:57:28.145729    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:57:28.145729    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:57:28.145729    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:57:28.146399    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:57:28.146833    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:57:28.147450    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:57:28.147769    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:57:28.148185    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:28.148289    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:57:28.148631    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:30.345672    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:30.346680    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:30.346741    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:32.989097    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:57:32.989097    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:32.989747    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:57:33.088565    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0408 18:57:33.098689    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0408 18:57:33.131324    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0408 18:57:33.139068    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0408 18:57:33.174910    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0408 18:57:33.181899    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0408 18:57:33.214005    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0408 18:57:33.220687    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0408 18:57:33.260034    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0408 18:57:33.268924    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0408 18:57:33.301239    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0408 18:57:33.307285    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0408 18:57:33.327871    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:57:33.379676    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:57:33.431755    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:57:33.476802    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:57:33.525863    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0408 18:57:33.572048    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:57:33.622775    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:57:33.668995    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:57:33.716097    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:57:33.765127    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:57:33.812274    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:57:33.860163    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0408 18:57:33.892180    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0408 18:57:33.922406    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0408 18:57:33.953869    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0408 18:57:33.982610    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0408 18:57:34.013819    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0408 18:57:34.049345    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0408 18:57:34.098860    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:57:34.119302    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:57:34.151356    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.158638    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.169854    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.190202    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:57:34.221798    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:57:34.255567    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.262754    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.274318    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.297139    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:57:34.327129    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:57:34.364211    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.372316    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.384079    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.405839    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:57:34.436146    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:57:34.442747    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:57:34.442747    9012 kubeadm.go:934] updating node {m03 172.22.44.49 8443 v1.32.2 docker true true} ...
	I0408 18:57:34.443447    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.44.49
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:57:34.443447    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:57:34.457668    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:57:34.487506    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:57:34.487621    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:57:34.499390    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:57:34.520019    9012 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.32.2: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.32.2': No such file or directory
	
	Initiating transfer...
	I0408 18:57:34.531309    9012 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.32.2
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm.sha256
	I0408 18:57:34.551707    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm -> /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet.sha256
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl.sha256
	I0408 18:57:34.552541    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl -> /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:57:34.566871    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:57:34.567867    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:57:34.569497    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:57:34.575644    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubeadm: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubeadm': No such file or directory
	I0408 18:57:34.575644    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm --> /var/lib/minikube/binaries/v1.32.2/kubeadm (70942872 bytes)
	I0408 18:57:34.628587    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet -> /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:57:34.628587    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubectl': No such file or directory
	I0408 18:57:34.628829    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl --> /var/lib/minikube/binaries/v1.32.2/kubectl (57323672 bytes)
	I0408 18:57:34.642503    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:57:34.682115    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubelet: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubelet': No such file or directory
	I0408 18:57:34.682227    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet --> /var/lib/minikube/binaries/v1.32.2/kubelet (77406468 bytes)
	I0408 18:57:36.002421    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0408 18:57:36.021630    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (312 bytes)
	I0408 18:57:36.054894    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:57:36.089926    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0408 18:57:36.137467    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:57:36.144566    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:57:36.179081    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:36.398442    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:57:36.435035    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:57:36.435952    9012 start.go:317] joinCluster: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:def
ault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget
:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOpti
mizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:57:36.435952    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0408 18:57:36.435952    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:38.631812    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:38.631812    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:38.632906    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:41.313133    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:57:41.313308    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:41.313390    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:57:41.532765    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0": (5.0967729s)
	I0408 18:57:41.533942    9012 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:57:41.533942    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 3txpsc.ry8va8iswsjgjcej --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m03 --control-plane --apiserver-advertise-address=172.22.44.49 --apiserver-bind-port=8443"
	I0408 18:58:24.723444    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 3txpsc.ry8va8iswsjgjcej --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m03 --control-plane --apiserver-advertise-address=172.22.44.49 --apiserver-bind-port=8443": (43.1891609s)
	I0408 18:58:24.723587    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0408 18:58:25.714279    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400-m03 minikube.k8s.io/updated_at=2025_04_08T18_58_25_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=false
	I0408 18:58:25.892117    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-089400-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0408 18:58:26.059827    9012 start.go:319] duration metric: took 49.6234835s to joinCluster
	I0408 18:58:26.059827    9012 start.go:235] Will wait 6m0s for node &{Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:58:26.060813    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:58:26.063824    9012 out.go:177] * Verifying Kubernetes components...
	I0408 18:58:26.078811    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:58:26.502070    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:58:26.555832    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:58:26.555832    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0408 18:58:26.555832    9012 kubeadm.go:483] Overriding stale ClientConfig host https://172.22.47.254:8443 with https://172.22.47.59:8443
	I0408 18:58:26.556916    9012 node_ready.go:35] waiting up to 6m0s for node "ha-089400-m03" to be "Ready" ...
	I0408 18:58:26.556916    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:26.556916    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:26.556916    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:26.556916    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:26.574567    9012 round_trippers.go:581] Response Status: 200 OK in 17 milliseconds
	I0408 18:58:27.057716    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:27.057716    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:27.057716    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:27.057716    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:27.066088    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:27.558007    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:27.558074    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:27.558074    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:27.558074    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:27.564660    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:28.057118    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:28.057118    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:28.057118    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:28.057118    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:28.068170    9012 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 18:58:28.557898    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:28.557898    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:28.557898    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:28.557898    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:28.563830    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:28.563830    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:29.057369    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:29.057369    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:29.057369    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:29.057369    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:29.065477    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:29.557993    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:29.557993    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:29.557993    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:29.558109    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:29.577427    9012 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 18:58:30.057653    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:30.057653    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:30.057653    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:30.057653    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:30.063904    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:30.557846    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:30.557870    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:30.557870    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:30.557870    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:30.565528    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:30.566223    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:31.057701    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:31.057701    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:31.057701    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:31.057701    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:31.166220    9012 round_trippers.go:581] Response Status: 200 OK in 108 milliseconds
	I0408 18:58:31.557420    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:31.557420    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:31.557420    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:31.557420    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:31.563677    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:32.057182    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:32.057182    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:32.057182    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:32.057182    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:32.067623    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:32.557428    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:32.557428    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:32.557428    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:32.557428    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:32.562869    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:33.057240    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:33.057240    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:33.057240    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:33.057240    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:33.063773    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:33.063921    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:33.557648    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:33.557648    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:33.557648    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:33.557648    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:33.618078    9012 round_trippers.go:581] Response Status: 200 OK in 60 milliseconds
	I0408 18:58:34.057142    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:34.057142    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:34.057142    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:34.057142    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:34.068528    9012 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 18:58:34.557191    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:34.557191    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:34.557191    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:34.557191    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:34.569955    9012 round_trippers.go:581] Response Status: 200 OK in 12 milliseconds
	I0408 18:58:35.057623    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:35.057623    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:35.057623    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:35.057623    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:35.064281    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:35.064786    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:35.557087    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:35.557087    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:35.557087    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:35.557087    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:35.565250    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:36.057772    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:36.057772    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:36.057772    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:36.057772    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:36.063816    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:36.558033    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:36.558033    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:36.558033    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:36.558033    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:36.563695    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.057420    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:37.057420    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:37.057900    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:37.057900    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:37.063183    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.557506    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:37.557506    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:37.557506    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:37.557506    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:37.562983    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.563546    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:38.057305    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:38.057305    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:38.057305    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:38.057305    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:38.063559    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:38.557964    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:38.557964    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:38.557964    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:38.558055    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:38.566553    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:39.057835    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:39.057925    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:39.057925    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:39.057925    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:39.063365    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:39.557139    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:39.557139    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:39.557139    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:39.557139    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:39.563297    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:40.057668    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:40.057668    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:40.058112    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:40.058112    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:40.063774    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:40.063808    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:40.557027    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:40.557027    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:40.557027    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:40.557027    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:40.561855    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:41.057228    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:41.057228    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:41.057228    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:41.057228    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:41.065918    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:41.558287    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:41.558287    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:41.558287    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:41.558377    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:41.563341    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:42.057440    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:42.057440    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:42.057440    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:42.057440    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:42.065554    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:42.066588    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:42.558304    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:42.558401    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:42.558401    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:42.558401    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:42.564688    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:43.057225    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:43.057225    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:43.057225    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:43.057225    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:43.063203    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:43.558245    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:43.558336    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:43.558336    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:43.558336    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:43.563347    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:44.057407    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:44.057407    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:44.057407    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:44.057407    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:44.063500    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:44.557613    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:44.557613    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:44.557613    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:44.557613    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:44.564906    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:44.566019    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:45.057289    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:45.057289    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:45.057289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:45.057289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:45.061835    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:45.558244    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:45.558311    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:45.558373    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:45.558373    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:45.562822    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:46.057400    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:46.057400    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:46.057400    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:46.057400    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:46.066945    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:46.558118    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:46.558118    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:46.558118    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:46.558118    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:46.564500    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:47.057403    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:47.057403    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:47.057403    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:47.057403    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:47.063190    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:47.063734    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:47.557393    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:47.557393    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:47.557393    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:47.557393    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:47.563095    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:48.058596    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:48.058739    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:48.058739    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:48.058739    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:48.064140    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:48.557624    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:48.557624    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:48.557624    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:48.557624    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:48.563920    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:49.058063    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:49.058178    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:49.058178    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:49.058178    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:49.063346    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:49.557756    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:49.557756    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:49.557756    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:49.557756    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:49.567498    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:49.568031    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:50.058138    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.058138    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.058138    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.058138    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.064332    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:50.557611    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.557611    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.557611    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.557611    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.565791    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:50.566051    9012 node_ready.go:49] node "ha-089400-m03" has status "Ready":"True"
	I0408 18:58:50.566199    9012 node_ready.go:38] duration metric: took 24.0090946s for node "ha-089400-m03" to be "Ready" ...
	I0408 18:58:50.566263    9012 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:58:50.566440    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:50.566471    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.566471    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.566471    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.574664    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:50.576627    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.576627    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7f57d
	I0408 18:58:50.576627    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.576627    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.576627    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.580609    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:58:50.581595    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.581595    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.581595    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.581595    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.585597    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.586595    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.586595    9012 pod_ready.go:82] duration metric: took 9.968ms for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.586595    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.586595    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7v5zn
	I0408 18:58:50.586595    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.586595    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.586595    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.590699    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.591085    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.591085    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.591148    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.591148    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.594918    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:58:50.595148    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.595148    9012 pod_ready.go:82] duration metric: took 8.5525ms for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.595148    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.595264    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400
	I0408 18:58:50.595264    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.595341    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.595341    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.599719    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.600022    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.600022    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.600022    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.600022    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.602649    9012 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 18:58:50.603743    9012 pod_ready.go:93] pod "etcd-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.603743    9012 pod_ready.go:82] duration metric: took 8.5949ms for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.603743    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.603743    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m02
	I0408 18:58:50.603743    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.603743    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.603743    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.606495    9012 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 18:58:50.608222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:50.608284    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.608284    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.608284    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.612553    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.612553    9012 pod_ready.go:93] pod "etcd-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.612553    9012 pod_ready.go:82] duration metric: took 8.8103ms for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.612553    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.758816    9012 request.go:661] Waited for 146.2615ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m03
	I0408 18:58:50.759305    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m03
	I0408 18:58:50.759305    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.759305    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.759305    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.765654    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:50.957667    9012 request.go:661] Waited for 191.5067ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.957667    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.957667    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.957667    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.957667    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.963746    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:50.964235    9012 pod_ready.go:93] pod "etcd-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.964281    9012 pod_ready.go:82] duration metric: took 351.7248ms for pod "etcd-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.964314    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.158748    9012 request.go:661] Waited for 194.3919ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:58:51.158748    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:58:51.158748    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.159157    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.159157    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.164904    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.358234    9012 request.go:661] Waited for 192.4936ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:51.358234    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:51.358658    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.358739    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.358739    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.363836    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.363836    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:51.363836    9012 pod_ready.go:82] duration metric: took 399.5187ms for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.363836    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.558066    9012 request.go:661] Waited for 194.2287ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:58:51.558066    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:58:51.558066    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.558066    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.558066    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.563265    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.757692    9012 request.go:661] Waited for 193.9221ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:51.757692    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:51.757692    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.757692    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.757692    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.764468    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:51.765070    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:51.765125    9012 pod_ready.go:82] duration metric: took 401.2857ms for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.765125    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.958012    9012 request.go:661] Waited for 192.7415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m03
	I0408 18:58:51.958012    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m03
	I0408 18:58:51.958012    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.958012    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.958012    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.964420    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:52.159050    9012 request.go:661] Waited for 194.6286ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:52.159050    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:52.159438    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.159438    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.159438    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.166788    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:52.167159    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.167159    9012 pod_ready.go:82] duration metric: took 402.0314ms for pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.167159    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.358361    9012 request.go:661] Waited for 191.2004ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:58:52.358361    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:58:52.358361    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.358361    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.358361    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.365052    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:52.558676    9012 request.go:661] Waited for 193.1294ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:52.559187    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:52.559187    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.559187    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.559187    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.565001    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:52.565896    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.565896    9012 pod_ready.go:82] duration metric: took 398.7335ms for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.565896    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.758504    9012 request.go:661] Waited for 192.4303ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:58:52.759025    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:58:52.759025    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.759025    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.759025    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.764070    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:52.958887    9012 request.go:661] Waited for 193.4702ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:52.958887    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:52.958887    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.958887    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.958887    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.969210    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:52.970659    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.970710    9012 pod_ready.go:82] duration metric: took 404.8107ms for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.970710    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.158477    9012 request.go:661] Waited for 187.6617ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m03
	I0408 18:58:53.158477    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m03
	I0408 18:58:53.158477    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.158477    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.158477    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.163516    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:53.357751    9012 request.go:661] Waited for 193.07ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:53.357751    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:53.358342    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.358396    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.358396    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.364143    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:53.364674    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:53.364674    9012 pod_ready.go:82] duration metric: took 393.9615ms for pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.364674    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.558357    9012 request.go:661] Waited for 193.5726ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:58:53.558859    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:58:53.558859    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.558859    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.558859    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.563766    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:53.758429    9012 request.go:661] Waited for 193.9984ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:53.758429    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:53.758897    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.758897    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.758897    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.765902    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:53.766344    9012 pod_ready.go:93] pod "kube-proxy-c4hjd" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:53.766344    9012 pod_ready.go:82] duration metric: took 401.6663ms for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.766526    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-cqx6b" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.957910    9012 request.go:661] Waited for 191.3552ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-cqx6b
	I0408 18:58:53.958458    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-cqx6b
	I0408 18:58:53.958458    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.958458    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.958458    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.963298    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:54.158329    9012 request.go:661] Waited for 194.1424ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:54.158329    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:54.158329    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.158329    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.158329    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.167135    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:54.167576    9012 pod_ready.go:93] pod "kube-proxy-cqx6b" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.167666    9012 pod_ready.go:82] duration metric: took 401.1373ms for pod "kube-proxy-cqx6b" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.167666    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.357803    9012 request.go:661] Waited for 189.9475ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:58:54.357803    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:58:54.357803    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.357803    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.357803    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.363188    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:54.558763    9012 request.go:661] Waited for 194.5535ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.558763    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.558763    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.558763    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.558763    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.569898    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:54.570238    9012 pod_ready.go:93] pod "kube-proxy-gf6wz" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.570329    9012 pod_ready.go:82] duration metric: took 402.6599ms for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.570329    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.757885    9012 request.go:661] Waited for 187.4496ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:58:54.757885    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:58:54.757885    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.757885    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.757885    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.764202    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:54.958088    9012 request.go:661] Waited for 193.4099ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.958088    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.958088    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.958088    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.958088    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.963589    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:54.963960    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.964018    9012 pod_ready.go:82] duration metric: took 393.6855ms for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.964076    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.158025    9012 request.go:661] Waited for 193.8566ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:58:55.158025    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:58:55.158025    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.158025    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.158025    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.164478    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:55.358816    9012 request.go:661] Waited for 193.8344ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:55.359799    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:55.359976    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.360053    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.360106    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.365072    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:55.365523    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:55.365523    9012 pod_ready.go:82] duration metric: took 401.4445ms for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.365624    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.558315    9012 request.go:661] Waited for 192.6887ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m03
	I0408 18:58:55.558792    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m03
	I0408 18:58:55.558858    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.558858    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.558858    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.565082    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:55.758505    9012 request.go:661] Waited for 192.7752ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:55.758505    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:55.758505    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.758505    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.758505    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.766503    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:55.766916    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:55.766976    9012 pod_ready.go:82] duration metric: took 401.3479ms for pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.766976    9012 pod_ready.go:39] duration metric: took 5.2006432s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:58:55.767035    9012 api_server.go:52] waiting for apiserver process to appear ...
	I0408 18:58:55.777864    9012 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 18:58:55.803319    9012 api_server.go:72] duration metric: took 29.7432227s to wait for apiserver process to appear ...
	I0408 18:58:55.803344    9012 api_server.go:88] waiting for apiserver healthz status ...
	I0408 18:58:55.803402    9012 api_server.go:253] Checking apiserver healthz at https://172.22.47.59:8443/healthz ...
	I0408 18:58:55.814276    9012 api_server.go:279] https://172.22.47.59:8443/healthz returned 200:
	ok
	I0408 18:58:55.814471    9012 round_trippers.go:470] GET https://172.22.47.59:8443/version
	I0408 18:58:55.814471    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.814471    9012 round_trippers.go:480]     Accept: application/json, */*
	I0408 18:58:55.814471    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.815814    9012 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0408 18:58:55.815907    9012 api_server.go:141] control plane version: v1.32.2
	I0408 18:58:55.815907    9012 api_server.go:131] duration metric: took 12.5631ms to wait for apiserver health ...
	I0408 18:58:55.815907    9012 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 18:58:55.958767    9012 request.go:661] Waited for 142.8582ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:55.958767    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:55.958767    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.958767    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.958767    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.968028    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:55.970434    9012 system_pods.go:59] 24 kube-system pods found
	I0408 18:58:55.970988    9012 system_pods.go:61] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400-m03" [7a2a97ec-01c9-422a-bfd1-26e763146ff8] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-jtrnl" [e9d6e73b-0bca-4d4c-a306-488d28a5ac38] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400-m03" [3dd306b1-11cd-4e5b-acc8-9cd1fe25236c] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m03" [6f33fa37-94f7-4dda-9a49-c128d0f8555f] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-cqx6b" [f17cfde2-a18e-418f-922f-8d36b3c2b976] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400-m03" [f6ffda8d-6120-48b0-a2ef-0a3212d83fd4] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400-m03" [4d6da5dc-4ccd-4c21-8a42-7373be544476] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:58:55.970988    9012 system_pods.go:74] duration metric: took 155.0796ms to wait for pod list to return data ...
	I0408 18:58:55.970988    9012 default_sa.go:34] waiting for default service account to be created ...
	I0408 18:58:56.158445    9012 request.go:661] Waited for 187.4549ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:58:56.158793    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:58:56.158793    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.158793    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.158793    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.164766    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:56.164766    9012 default_sa.go:45] found service account: "default"
	I0408 18:58:56.164766    9012 default_sa.go:55] duration metric: took 193.7763ms for default service account to be created ...
	I0408 18:58:56.164766    9012 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 18:58:56.358222    9012 request.go:661] Waited for 193.4539ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:56.358222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:56.358222    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.358222    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.358222    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.363606    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:56.366152    9012 system_pods.go:86] 24 kube-system pods found
	I0408 18:58:56.366152    9012 system_pods.go:89] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "etcd-ha-089400-m03" [7a2a97ec-01c9-422a-bfd1-26e763146ff8] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-jtrnl" [e9d6e73b-0bca-4d4c-a306-488d28a5ac38] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-apiserver-ha-089400-m03" [3dd306b1-11cd-4e5b-acc8-9cd1fe25236c] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m03" [6f33fa37-94f7-4dda-9a49-c128d0f8555f] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-proxy-cqx6b" [f17cfde2-a18e-418f-922f-8d36b3c2b976] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400-m03" [f6ffda8d-6120-48b0-a2ef-0a3212d83fd4] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400-m03" [4d6da5dc-4ccd-4c21-8a42-7373be544476] Running
	I0408 18:58:56.366737    9012 system_pods.go:89] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:58:56.366737    9012 system_pods.go:126] duration metric: took 201.9694ms to wait for k8s-apps to be running ...
	I0408 18:58:56.366737    9012 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 18:58:56.377875    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:58:56.405628    9012 system_svc.go:56] duration metric: took 38.8906ms WaitForService to wait for kubelet
	I0408 18:58:56.405694    9012 kubeadm.go:582] duration metric: took 30.3456284s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:58:56.405756    9012 node_conditions.go:102] verifying NodePressure condition ...
	I0408 18:58:56.557669    9012 request.go:661] Waited for 151.8063ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes
	I0408 18:58:56.557669    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes
	I0408 18:58:56.557669    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.557669    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.557669    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.563976    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:105] duration metric: took 159.0026ms to run NodePressure ...
	I0408 18:58:56.564760    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:58:56.564760    9012 start.go:255] writing updated cluster config ...
	I0408 18:58:56.578721    9012 ssh_runner.go:195] Run: rm -f paused
	I0408 18:58:56.727889    9012 start.go:600] kubectl: 1.32.3, cluster: 1.32.2 (minor skew: 0)
	I0408 18:58:56.733188    9012 out.go:177] * Done! kubectl is now configured to use "ha-089400" cluster and "default" namespace by default
	
	
	==> Docker <==
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/b25d2f72ffcd40fc1387d63ac907f17f915bef81b8cb3473beaacbc88f842657/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/e32d687b8d5424c201e5c86be4f7e87c86e799fad9931fd10ac8786152ae94a8/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/34766421d15c3e00899ddcc49479c11696dc0e4a373a189c5cbafca2ad9bef91/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.037244286Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.038353572Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.038532370Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.039418559Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398095320Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398223718Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398305317Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398737112Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428088257Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428315254Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428407353Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.429979834Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904122069Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904346270Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904370770Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.905500575Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:36 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:59:36Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/a007107dd26d0a9a88a391781fffcf20b8829b0e935e8a9f8625b68e633045aa/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 18:59:37 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:59:37Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.161294936Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.162745343Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.162938144Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.163240945Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED              STATE               NAME                      ATTEMPT             POD ID              POD
	e05ddb65240a1       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   About a minute ago   Running             busybox                   0                   a007107dd26d0       busybox-58667487b6-d76nt
	24b3936e36154       c69fa2e9cbf5f                                                                                         9 minutes ago        Running             coredns                   0                   e32d687b8d542       coredns-668d6bf9bc-7f57d
	991a5e9234386       c69fa2e9cbf5f                                                                                         9 minutes ago        Running             coredns                   0                   34766421d15c3       coredns-668d6bf9bc-7v5zn
	ad2a3890b583d       6e38f40d628db                                                                                         9 minutes ago        Running             storage-provisioner       0                   b25d2f72ffcd4       storage-provisioner
	ea0911610b904       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              9 minutes ago        Running             kindnet-cni               0                   eb19661019a60       kindnet-tsg62
	1fa28a71c2bd6       f1332858868e1                                                                                         10 minutes ago       Running             kube-proxy                0                   2523814f4415a       kube-proxy-gf6wz
	b1828f8a97fe6       ghcr.io/kube-vip/kube-vip@sha256:e01c90bcdd3eb37a46aaf04f6c86cca3e66dd0db7a231f3c8e8aa105635c158a     10 minutes ago       Running             kube-vip                  0                   b65dfebdf40c5       kube-vip-ha-089400
	f13d6fc3e9492       85b7a174738ba                                                                                         10 minutes ago       Running             kube-apiserver            0                   6040dca8f6bf8       kube-apiserver-ha-089400
	6969b47aa676b       d8e673e7c9983                                                                                         10 minutes ago       Running             kube-scheduler            0                   e045296fcc947       kube-scheduler-ha-089400
	64cc2cd29fdc7       b6a454c5a800d                                                                                         10 minutes ago       Running             kube-controller-manager   0                   25a3cf2e261a7       kube-controller-manager-ha-089400
	dd0000ba2b8fc       a9e7e6b294baf                                                                                         10 minutes ago       Running             etcd                      0                   82a6814d9840b       etcd-ha-089400
	
	
	==> coredns [24b3936e3615] <==
	[INFO] 10.244.0.4:46731 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000197401s
	[INFO] 10.244.1.2:57130 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000228401s
	[INFO] 10.244.1.2:51995 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000149101s
	[INFO] 10.244.1.2:53167 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000151401s
	[INFO] 10.244.2.2:60530 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000172901s
	[INFO] 10.244.2.2:55628 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000147001s
	[INFO] 10.244.2.2:46073 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000154901s
	[INFO] 10.244.2.2:49601 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,aa,rd,ra 111 0.0000672s
	[INFO] 10.244.2.2:48545 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000096601s
	[INFO] 10.244.0.4:56197 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000190201s
	[INFO] 10.244.0.4:40945 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000384702s
	[INFO] 10.244.1.2:42589 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000340401s
	[INFO] 10.244.1.2:33657 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000157001s
	[INFO] 10.244.1.2:43921 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0001399s
	[INFO] 10.244.2.2:54742 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000284102s
	[INFO] 10.244.2.2:51046 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000296302s
	[INFO] 10.244.2.2:51819 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0001481s
	[INFO] 10.244.0.4:39976 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.0001915s
	[INFO] 10.244.0.4:33463 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000206901s
	[INFO] 10.244.0.4:47150 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0001754s
	[INFO] 10.244.1.2:37200 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000286201s
	[INFO] 10.244.1.2:33497 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000293201s
	[INFO] 10.244.1.2:37779 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0000742s
	[INFO] 10.244.2.2:42787 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000173401s
	[INFO] 10.244.2.2:52825 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0000747s
	
	
	==> coredns [991a5e923438] <==
	[INFO] 10.244.1.2:39417 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd 60 0.000081801s
	[INFO] 10.244.1.2:36458 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.059506783s
	[INFO] 10.244.2.2:38667 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.001155705s
	[INFO] 10.244.2.2:36314 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd 60 0.000176901s
	[INFO] 10.244.0.4:59697 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000229502s
	[INFO] 10.244.0.4:41143 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000245502s
	[INFO] 10.244.0.4:60072 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000385602s
	[INFO] 10.244.0.4:53645 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000273902s
	[INFO] 10.244.0.4:40792 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000203801s
	[INFO] 10.244.1.2:49773 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.117505829s
	[INFO] 10.244.1.2:39680 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000262202s
	[INFO] 10.244.1.2:39040 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000128901s
	[INFO] 10.244.1.2:50379 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.003319015s
	[INFO] 10.244.1.2:47430 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000232401s
	[INFO] 10.244.2.2:56517 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,aa,rd,ra 111 0.000378602s
	[INFO] 10.244.2.2:38313 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000208201s
	[INFO] 10.244.2.2:37461 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0000729s
	[INFO] 10.244.0.4:36721 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000271402s
	[INFO] 10.244.0.4:42158 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000168801s
	[INFO] 10.244.1.2:41784 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000258901s
	[INFO] 10.244.2.2:46777 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000198901s
	[INFO] 10.244.0.4:34099 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000235401s
	[INFO] 10.244.1.2:36014 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.0001672s
	[INFO] 10.244.2.2:54303 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.0001955s
	[INFO] 10.244.2.2:60514 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000224901s
	
	
	==> describe nodes <==
	Name:               ha-089400
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T18_50_37_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:50:32 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:00:38 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:00:06 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:00:06 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:00:06 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:00:06 +0000   Tue, 08 Apr 2025 18:51:04 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.47.59
	  Hostname:    ha-089400
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 b5f60f03694b46f6bab79c049c06cf8e
	  System UUID:                0ea6b7ae-aa8c-764b-840e-6fae09375fe1
	  Boot ID:                    44cd1066-73d1-4beb-9f2d-5ae36e102363
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-d76nt             0 (0%)        0 (0%)      0 (0%)           0 (0%)         69s
	  kube-system                 coredns-668d6bf9bc-7f57d             100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     10m
	  kube-system                 coredns-668d6bf9bc-7v5zn             100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     10m
	  kube-system                 etcd-ha-089400                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         10m
	  kube-system                 kindnet-tsg62                        100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      10m
	  kube-system                 kube-apiserver-ha-089400             250m (12%)    0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-controller-manager-ha-089400    200m (10%)    0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-proxy-gf6wz                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-scheduler-ha-089400             100m (5%)     0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 kube-vip-ha-089400                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         10m
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         9m55s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                950m (47%)   100m (5%)
	  memory             290Mi (13%)  390Mi (18%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 10m                kube-proxy       
	  Normal  NodeHasSufficientPID     10m (x7 over 10m)  kubelet          Node ha-089400 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  10m                kubelet          Updated Node Allocatable limit across pods
	  Normal  Starting                 10m                kubelet          Starting kubelet.
	  Normal  NodeHasSufficientMemory  10m (x8 over 10m)  kubelet          Node ha-089400 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    10m (x8 over 10m)  kubelet          Node ha-089400 status is now: NodeHasNoDiskPressure
	  Normal  Starting                 10m                kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  10m                kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  10m                kubelet          Node ha-089400 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    10m                kubelet          Node ha-089400 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     10m                kubelet          Node ha-089400 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           10m                node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	  Normal  NodeReady                9m39s              kubelet          Node ha-089400 status is now: NodeReady
	  Normal  RegisteredNode           6m13s              node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	  Normal  RegisteredNode           2m12s              node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	
	
	Name:               ha-089400-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T18_54_23_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:54:18 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400-m02
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:00:36 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 18:59:54 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 18:59:54 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 18:59:54 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 18:59:54 +0000   Tue, 08 Apr 2025 18:54:45 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.34.212
	  Hostname:    ha-089400-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 2f82861eb8e545dea9c55a9a05a7b7ba
	  System UUID:                2bd9c8ec-208e-1240-8d27-9f0c6ed89271
	  Boot ID:                    6ffadba8-87d2-406f-a63d-6b7871dd2907
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-lwn24                 0 (0%)        0 (0%)      0 (0%)           0 (0%)         69s
	  kube-system                 etcd-ha-089400-m02                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         6m24s
	  kube-system                 kindnet-r2jmm                            100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      6m25s
	  kube-system                 kube-apiserver-ha-089400-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         6m24s
	  kube-system                 kube-controller-manager-ha-089400-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         6m24s
	  kube-system                 kube-proxy-c4hjd                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m25s
	  kube-system                 kube-scheduler-ha-089400-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         6m24s
	  kube-system                 kube-vip-ha-089400-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         6m24s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (7%)  50Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 6m18s                  kube-proxy       
	  Normal  NodeHasSufficientMemory  6m25s (x8 over 6m25s)  kubelet          Node ha-089400-m02 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    6m25s (x8 over 6m25s)  kubelet          Node ha-089400-m02 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     6m25s (x7 over 6m25s)  kubelet          Node ha-089400-m02 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           6m24s                  node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	  Normal  RegisteredNode           6m13s                  node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	  Normal  RegisteredNode           2m12s                  node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	
	
	Name:               ha-089400-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T18_58_25_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:58:17 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400-m03
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:00:39 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 18:59:49 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 18:59:49 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 18:59:49 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 18:59:49 +0000   Tue, 08 Apr 2025 18:58:50 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.44.49
	  Hostname:    ha-089400-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 3d90b36bb2a74c3982ac76d0daa17ddf
	  System UUID:                59a21209-43a0-d24e-95d9-24be2f7a265f
	  Boot ID:                    684a319b-b7a5-481c-957e-da534b8a608a
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-snc97                 0 (0%)        0 (0%)      0 (0%)           0 (0%)         69s
	  kube-system                 etcd-ha-089400-m03                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         2m25s
	  kube-system                 kindnet-jtrnl                            100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      2m26s
	  kube-system                 kube-apiserver-ha-089400-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         2m25s
	  kube-system                 kube-controller-manager-ha-089400-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         2m25s
	  kube-system                 kube-proxy-cqx6b                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m26s
	  kube-system                 kube-scheduler-ha-089400-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         2m25s
	  kube-system                 kube-vip-ha-089400-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         2m25s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (7%)  50Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 2m19s                  kube-proxy       
	  Normal  NodeHasSufficientMemory  2m26s (x8 over 2m26s)  kubelet          Node ha-089400-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    2m26s (x8 over 2m26s)  kubelet          Node ha-089400-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     2m26s (x7 over 2m26s)  kubelet          Node ha-089400-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  2m26s                  kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           2m24s                  node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	  Normal  RegisteredNode           2m23s                  node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	  Normal  RegisteredNode           2m12s                  node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	
	
	==> dmesg <==
	[  +6.924411] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000001] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 18:49] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.184710] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +31.673598] systemd-fstab-generator[1006]: Ignoring "noauto" option for root device
	[Apr 8 18:50] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.534954] systemd-fstab-generator[1044]: Ignoring "noauto" option for root device
	[  +0.194253] systemd-fstab-generator[1056]: Ignoring "noauto" option for root device
	[  +0.214985] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.864083] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.195304] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.207808] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.265895] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[ +11.074296] systemd-fstab-generator[1433]: Ignoring "noauto" option for root device
	[  +0.104184] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.621588] systemd-fstab-generator[1700]: Ignoring "noauto" option for root device
	[  +6.386990] systemd-fstab-generator[1854]: Ignoring "noauto" option for root device
	[  +0.102161] kauditd_printk_skb: 74 callbacks suppressed
	[  +6.073571] kauditd_printk_skb: 67 callbacks suppressed
	[  +3.465280] systemd-fstab-generator[2371]: Ignoring "noauto" option for root device
	[  +6.734586] kauditd_printk_skb: 17 callbacks suppressed
	[  +7.514381] kauditd_printk_skb: 29 callbacks suppressed
	[Apr 8 18:53] hrtimer: interrupt took 11100806 ns
	[Apr 8 18:54] kauditd_printk_skb: 26 callbacks suppressed
	
	
	==> etcd [dd0000ba2b8f] <==
	{"level":"info","ts":"2025-04-08T18:58:22.498988Z","caller":"traceutil/trace.go:171","msg":"trace[1272509077] range","detail":"{range_begin:; range_end:; response_count:0; response_revision:1551; }","duration":"163.755997ms","start":"2025-04-08T18:58:22.335181Z","end":"2025-04-08T18:58:22.498937Z","steps":["trace[1272509077] 'range keys from in-memory index tree'  (duration: 163.639397ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T18:58:22.981905Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"a471cd1dbcc1855","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"warn","ts":"2025-04-08T18:58:23.983340Z","caller":"etcdhttp/peer.go:150","msg":"failed to promote a member","member-id":"a471cd1dbcc1855","error":"etcdserver: can only promote a learner member which is in sync with leader"}
	{"level":"info","ts":"2025-04-08T18:58:24.502165Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"8c2514f94f31139c switched to configuration voters=(740592351387064405 10098500800456233884 14330270301426153963)"}
	{"level":"info","ts":"2025-04-08T18:58:24.519945Z","caller":"membership/cluster.go:535","msg":"promote member","cluster-id":"d3d96df9e2b0dc8b","local-member-id":"8c2514f94f31139c"}
	{"level":"info","ts":"2025-04-08T18:58:24.520124Z","caller":"etcdserver/server.go:2018","msg":"applied a configuration change through raft","local-member-id":"8c2514f94f31139c","raft-conf-change":"ConfChangeAddNode","raft-conf-change-node-id":"a471cd1dbcc1855"}
	{"level":"info","ts":"2025-04-08T18:58:31.168364Z","caller":"traceutil/trace.go:171","msg":"trace[925808021] transaction","detail":"{read_only:false; response_revision:1588; number_of_response:1; }","duration":"123.951722ms","start":"2025-04-08T18:58:31.044369Z","end":"2025-04-08T18:58:31.168321Z","steps":["trace[925808021] 'process raft request'  (duration: 123.768221ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T18:58:31.171008Z","caller":"traceutil/trace.go:171","msg":"trace[1612953898] linearizableReadLoop","detail":"{readStateIndex:1775; appliedIndex:1776; }","duration":"103.649704ms","start":"2025-04-08T18:58:31.067347Z","end":"2025-04-08T18:58:31.170997Z","steps":["trace[1612953898] 'read index received'  (duration: 103.645704ms)","trace[1612953898] 'applied index is now lower than readState.Index'  (duration: 3.3µs)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T18:58:31.171240Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"103.905305ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/minions/ha-089400-m03\" limit:1 ","response":"range_response_count:1 size:4442"}
	{"level":"info","ts":"2025-04-08T18:58:31.171290Z","caller":"traceutil/trace.go:171","msg":"trace[1708554246] range","detail":"{range_begin:/registry/minions/ha-089400-m03; range_end:; response_count:1; response_revision:1588; }","duration":"103.989607ms","start":"2025-04-08T18:58:31.067292Z","end":"2025-04-08T18:58:31.171282Z","steps":["trace[1708554246] 'agreement among raft nodes before linearized reading'  (duration: 103.764005ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T18:58:33.608392Z","caller":"traceutil/trace.go:171","msg":"trace[1183774225] transaction","detail":"{read_only:false; response_revision:1600; number_of_response:1; }","duration":"302.071044ms","start":"2025-04-08T18:58:33.306303Z","end":"2025-04-08T18:58:33.608374Z","steps":["trace[1183774225] 'process raft request'  (duration: 301.905243ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T18:58:33.608962Z","caller":"v3rpc/interceptor.go:197","msg":"request stats","start time":"2025-04-08T18:58:33.306286Z","time spent":"302.585747ms","remote":"127.0.0.1:33280","response type":"/etcdserverpb.KV/Txn","request count":1,"request size":419,"response count":0,"response size":40,"request content":"compare:<target:MOD key:\"/registry/leases/kube-system/plndr-cp-lock\" mod_revision:1596 > success:<request_put:<key:\"/registry/leases/kube-system/plndr-cp-lock\" value_size:369 >> failure:<request_range:<key:\"/registry/leases/kube-system/plndr-cp-lock\" > >"}
	{"level":"info","ts":"2025-04-08T18:58:33.618887Z","caller":"traceutil/trace.go:171","msg":"trace[288033071] linearizableReadLoop","detail":"{readStateIndex:1788; appliedIndex:1789; }","duration":"154.697993ms","start":"2025-04-08T18:58:33.464008Z","end":"2025-04-08T18:58:33.618706Z","steps":["trace[288033071] 'read index received'  (duration: 154.692593ms)","trace[288033071] 'applied index is now lower than readState.Index'  (duration: 4.2µs)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T18:58:33.619162Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"155.142896ms","expected-duration":"100ms","prefix":"read-only range ","request":"limit:1 keys_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T18:58:33.619255Z","caller":"traceutil/trace.go:171","msg":"trace[2034357586] range","detail":"{range_begin:; range_end:; response_count:0; response_revision:1600; }","duration":"155.195496ms","start":"2025-04-08T18:58:33.464001Z","end":"2025-04-08T18:58:33.619197Z","steps":["trace[2034357586] 'agreement among raft nodes before linearized reading'  (duration: 155.125896ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T18:58:33.619955Z","caller":"traceutil/trace.go:171","msg":"trace[1301126270] transaction","detail":"{read_only:false; response_revision:1601; number_of_response:1; }","duration":"200.292157ms","start":"2025-04-08T18:58:33.419654Z","end":"2025-04-08T18:58:33.619946Z","steps":["trace[1301126270] 'process raft request'  (duration: 200.232956ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T18:58:33.620990Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"149.963566ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/csidrivers/\" range_end:\"/registry/csidrivers0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T18:58:33.621246Z","caller":"traceutil/trace.go:171","msg":"trace[412089820] range","detail":"{range_begin:/registry/csidrivers/; range_end:/registry/csidrivers0; response_count:0; response_revision:1601; }","duration":"150.250767ms","start":"2025-04-08T18:58:33.470985Z","end":"2025-04-08T18:58:33.621235Z","steps":["trace[412089820] 'agreement among raft nodes before linearized reading'  (duration: 149.973665ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T18:58:33.622110Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"118.669485ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T18:58:33.622319Z","caller":"traceutil/trace.go:171","msg":"trace[1126075465] range","detail":"{range_begin:/registry/health; range_end:; response_count:0; response_revision:1601; }","duration":"118.920986ms","start":"2025-04-08T18:58:33.503381Z","end":"2025-04-08T18:58:33.622302Z","steps":["trace[1126075465] 'agreement among raft nodes before linearized reading'  (duration: 118.682485ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T18:59:35.537153Z","caller":"traceutil/trace.go:171","msg":"trace[921481814] range","detail":"{range_begin:/registry/pods/default/busybox-58667487b6-5bbft; range_end:; response_count:1; response_revision:1823; }","duration":"125.585753ms","start":"2025-04-08T18:59:35.411554Z","end":"2025-04-08T18:59:35.537140Z","steps":["trace[921481814] 'agreement among raft nodes before linearized reading'  (duration: 125.523753ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T19:00:29.461101Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1084}
	{"level":"warn","ts":"2025-04-08T19:00:29.640941Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"108.001971ms","expected-duration":"100ms","prefix":"","request":"header:<ID:1753472654461422443 username:\"kube-apiserver-etcd-client\" auth_revision:1 > txn:<compare:<target:MOD key:\"/registry/leases/kube-system/apiserver-agdqlsj2vtk6nn2zn73mmc7zna\" mod_revision:1995 > success:<request_put:<key:\"/registry/leases/kube-system/apiserver-agdqlsj2vtk6nn2zn73mmc7zna\" value_size:605 >> failure:<>>","response":"size:16"}
	{"level":"info","ts":"2025-04-08T19:00:29.653498Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":1084,"took":"191.310935ms","hash":3237656877,"current-db-size-bytes":3710976,"current-db-size":"3.7 MB","current-db-size-in-use-bytes":2252800,"current-db-size-in-use":"2.3 MB"}
	{"level":"info","ts":"2025-04-08T19:00:29.653564Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3237656877,"revision":1084,"compact-revision":-1}
	
	
	==> kernel <==
	 19:00:43 up 12 min,  0 users,  load average: 0.68, 0.79, 0.47
	Linux ha-089400 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [ea0911610b90] <==
	I0408 19:00:00.896097       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:00:10.892970       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:00:10.893106       1 main.go:301] handling current node
	I0408 19:00:10.893217       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:00:10.893583       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:00:10.893973       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:00:10.894003       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:00:20.893347       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:00:20.893458       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:00:20.894099       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:00:20.894217       1 main.go:301] handling current node
	I0408 19:00:20.894251       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:00:20.894621       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:00:30.902952       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:00:30.904281       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:00:30.905006       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:00:30.905038       1 main.go:301] handling current node
	I0408 19:00:30.905054       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:00:30.905060       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:00:40.893655       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:00:40.893933       1 main.go:301] handling current node
	I0408 19:00:40.893960       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:00:40.893970       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:00:40.894581       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:00:40.894724       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	
	
	==> kube-apiserver [f13d6fc3e949] <==
	I0408 18:50:35.714862       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 18:50:35.761737       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 18:50:35.789532       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 18:50:39.820339       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0408 18:50:40.329216       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0408 18:58:18.314332       1 finisher.go:175] "Unhandled Error" err="FinishRequest: post-timeout activity - time-elapsed: 11.1µs, panicked: false, err: context canceled, panic-reason: <nil>" logger="UnhandledError"
	E0408 18:58:18.314713       1 writers.go:123] "Unhandled Error" err="apiserver was unable to write a JSON response: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.316201       1 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"http: Handler timeout\"}: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.317424       1 writers.go:136] "Unhandled Error" err="apiserver was unable to write a fallback JSON response: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.322650       1 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="53.458932ms" method="PATCH" path="/api/v1/namespaces/default/events/ha-089400-m03.18346ce7874a75d5" result=null
	E0408 18:59:43.075414       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58299: use of closed network connection
	E0408 18:59:44.684313       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58301: use of closed network connection
	E0408 18:59:45.292441       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58304: use of closed network connection
	E0408 18:59:45.888901       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58306: use of closed network connection
	E0408 18:59:46.543609       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58308: use of closed network connection
	E0408 18:59:47.050078       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58310: use of closed network connection
	E0408 18:59:47.598997       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58312: use of closed network connection
	E0408 18:59:48.143315       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58314: use of closed network connection
	E0408 18:59:48.674108       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58316: use of closed network connection
	E0408 18:59:49.679279       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58319: use of closed network connection
	E0408 19:00:00.211405       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58321: use of closed network connection
	E0408 19:00:00.761286       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58325: use of closed network connection
	E0408 19:00:11.285375       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58327: use of closed network connection
	E0408 19:00:11.809013       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58330: use of closed network connection
	E0408 19:00:22.351099       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58332: use of closed network connection
	
	
	==> kube-controller-manager [64cc2cd29fdc] <==
	I0408 18:58:48.264812       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 18:58:50.295996       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 18:58:50.322197       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 18:58:50.462104       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 18:59:35.084151       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="169.235847ms"
	I0408 18:59:35.197525       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="113.304899ms"
	I0408 18:59:35.405081       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="201.709088ms"
	I0408 18:59:35.879123       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="473.965687ms"
	E0408 18:59:35.879188       1 replica_set.go:560] "Unhandled Error" err="sync \"default/busybox-58667487b6\" failed with Operation cannot be fulfilled on replicasets.apps \"busybox-58667487b6\": the object has been modified; please apply your changes to the latest version and try again" logger="UnhandledError"
	I0408 18:59:35.880312       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="56µs"
	I0408 18:59:35.885674       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="145.1µs"
	I0408 18:59:35.919889       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="32.2µs"
	I0408 18:59:36.062912       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="34.680552ms"
	I0408 18:59:36.063415       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="171.001µs"
	I0408 18:59:37.401687       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="72.4µs"
	I0408 18:59:38.152748       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="134.301µs"
	I0408 18:59:38.563303       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="29.216632ms"
	I0408 18:59:38.563743       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="85.6µs"
	I0408 18:59:39.424297       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="27.557725ms"
	I0408 18:59:39.424716       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="58.2µs"
	I0408 18:59:40.039726       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="22.0613ms"
	I0408 18:59:40.040213       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="39.5µs"
	I0408 18:59:49.480808       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 18:59:54.426219       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m02"
	I0408 19:00:06.760424       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400"
	
	
	==> kube-proxy [1fa28a71c2bd] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 18:50:42.911256       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 18:50:42.970229       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.47.59"]
	E0408 18:50:42.971294       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 18:50:43.056321       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 18:50:43.056447       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 18:50:43.056480       1 server_linux.go:170] "Using iptables Proxier"
	I0408 18:50:43.061345       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 18:50:43.063633       1 server.go:497] "Version info" version="v1.32.2"
	I0408 18:50:43.063672       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 18:50:43.074263       1 config.go:199] "Starting service config controller"
	I0408 18:50:43.074379       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 18:50:43.074474       1 config.go:105] "Starting endpoint slice config controller"
	I0408 18:50:43.074560       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 18:50:43.075266       1 config.go:329] "Starting node config controller"
	I0408 18:50:43.075300       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 18:50:43.175838       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 18:50:43.176118       1 shared_informer.go:320] Caches are synced for service config
	I0408 18:50:43.176268       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [6969b47aa676] <==
	E0408 18:50:33.552145       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StatefulSet: failed to list *v1.StatefulSet: statefulsets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"statefulsets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 18:50:33.558386       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 18:50:33.558434       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 18:50:33.564850       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 18:50:33.564894       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	I0408 18:50:35.989762       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	E0408 18:58:17.708319       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-cqx6b\": pod kube-proxy-cqx6b is already assigned to node \"ha-089400-m03\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-cqx6b" node="ha-089400-m03"
	E0408 18:58:17.721728       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-jtrnl\": pod kindnet-jtrnl is already assigned to node \"ha-089400-m03\"" plugin="DefaultBinder" pod="kube-system/kindnet-jtrnl" node="ha-089400-m03"
	E0408 18:58:17.724330       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod e9d6e73b-0bca-4d4c-a306-488d28a5ac38(kube-system/kindnet-jtrnl) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-jtrnl"
	E0408 18:58:17.724404       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod f17cfde2-a18e-418f-922f-8d36b3c2b976(kube-system/kube-proxy-cqx6b) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-cqx6b"
	E0408 18:58:17.726947       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-cqx6b\": pod kube-proxy-cqx6b is already assigned to node \"ha-089400-m03\"" pod="kube-system/kube-proxy-cqx6b"
	I0408 18:58:17.728700       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-cqx6b" node="ha-089400-m03"
	E0408 18:58:17.726324       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-jtrnl\": pod kindnet-jtrnl is already assigned to node \"ha-089400-m03\"" pod="kube-system/kindnet-jtrnl"
	I0408 18:58:17.732385       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-jtrnl" node="ha-089400-m03"
	E0408 18:59:35.008691       1 schedule_one.go:954] "Scheduler cache AssumePod failed" err="pod 7ebebfbc-18d5-4df7-b3f4-dd8565c43b81(default/busybox-58667487b6-snc97) is in the cache, so can't be assumed" pod="default/busybox-58667487b6-snc97"
	E0408 18:59:35.009157       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="pod 7ebebfbc-18d5-4df7-b3f4-dd8565c43b81(default/busybox-58667487b6-snc97) is in the cache, so can't be assumed" pod="default/busybox-58667487b6-snc97"
	I0408 18:59:35.009461       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-snc97" node="ha-089400-m03"
	E0408 18:59:35.231897       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-lwn24\": pod busybox-58667487b6-lwn24 is already assigned to node \"ha-089400-m02\"" plugin="DefaultBinder" pod="default/busybox-58667487b6-lwn24" node="ha-089400-m02"
	E0408 18:59:35.232941       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod f1ce7e68-1e3b-4fe5-a9ca-3f45c7bc1954(default/busybox-58667487b6-lwn24) wasn't assumed so cannot be forgotten" pod="default/busybox-58667487b6-lwn24"
	E0408 18:59:35.233205       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-lwn24\": pod busybox-58667487b6-lwn24 is already assigned to node \"ha-089400-m02\"" pod="default/busybox-58667487b6-lwn24"
	I0408 18:59:35.233697       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-lwn24" node="ha-089400-m02"
	E0408 18:59:35.236974       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-d76nt\": pod busybox-58667487b6-d76nt is already assigned to node \"ha-089400\"" plugin="DefaultBinder" pod="default/busybox-58667487b6-d76nt" node="ha-089400"
	E0408 18:59:35.239920       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod 76d5dd43-6aff-4922-b3a8-3663f5c88670(default/busybox-58667487b6-d76nt) wasn't assumed so cannot be forgotten" pod="default/busybox-58667487b6-d76nt"
	E0408 18:59:35.240159       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-d76nt\": pod busybox-58667487b6-d76nt is already assigned to node \"ha-089400\"" pod="default/busybox-58667487b6-d76nt"
	I0408 18:59:35.240386       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-d76nt" node="ha-089400"
	
	
	==> kubelet <==
	Apr 08 18:56:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 18:56:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 18:56:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 18:57:35 ha-089400 kubelet[2378]: E0408 18:57:35.802540    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 18:57:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 18:57:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 18:57:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 18:57:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 18:58:35 ha-089400 kubelet[2378]: E0408 18:58:35.797624    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 18:58:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 18:58:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 18:58:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 18:58:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 18:59:35 ha-089400 kubelet[2378]: I0408 18:59:35.156095    2378 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccjdz\" (UniqueName: \"kubernetes.io/projected/76d5dd43-6aff-4922-b3a8-3663f5c88670-kube-api-access-ccjdz\") pod \"busybox-58667487b6-d76nt\" (UID: \"76d5dd43-6aff-4922-b3a8-3663f5c88670\") " pod="default/busybox-58667487b6-d76nt"
	Apr 08 18:59:35 ha-089400 kubelet[2378]: E0408 18:59:35.797830    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 18:59:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 18:59:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 18:59:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 18:59:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 18:59:38 ha-089400 kubelet[2378]: I0408 18:59:38.538674    2378 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="default/busybox-58667487b6-d76nt" podStartSLOduration=2.924798257 podStartE2EDuration="4.532852221s" podCreationTimestamp="2025-04-08 18:59:34 +0000 UTC" firstStartedPulling="2025-04-08 18:59:36.28580866 +0000 UTC m=+540.743627020" lastFinishedPulling="2025-04-08 18:59:37.893862624 +0000 UTC m=+542.351680984" observedRunningTime="2025-04-08 18:59:38.531932217 +0000 UTC m=+542.989750577" watchObservedRunningTime="2025-04-08 18:59:38.532852221 +0000 UTC m=+542.990670581"
	Apr 08 19:00:35 ha-089400 kubelet[2378]: E0408 19:00:35.801234    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:00:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:00:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:00:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:00:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p ha-089400 -n ha-089400
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p ha-089400 -n ha-089400: (12.7052476s)
helpers_test.go:261: (dbg) Run:  kubectl --context ha-089400 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiControlPlane/serial/PingHostFromPods FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiControlPlane/serial/PingHostFromPods (70.15s)

                                                
                                    
x
+
TestMultiControlPlane/serial/StopSecondaryNode (48.85s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StopSecondaryNode
ha_test.go:365: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 node stop m02 -v=7 --alsologtostderr
ha_test.go:365: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p ha-089400 node stop m02 -v=7 --alsologtostderr: exit status 1 (12.5578937s)

                                                
                                                
-- stdout --
	* Stopping node "ha-089400-m02"  ...
	* Powering off "ha-089400-m02" via SSH ...

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 19:17:14.874562    2252 out.go:345] Setting OutFile to fd 1932 ...
	I0408 19:17:14.980523    2252 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:17:14.980876    2252 out.go:358] Setting ErrFile to fd 884...
	I0408 19:17:14.980876    2252 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:17:14.995062    2252 mustload.go:65] Loading cluster: ha-089400
	I0408 19:17:14.996225    2252 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:17:14.996225    2252 stop.go:39] StopHost: ha-089400-m02
	I0408 19:17:15.001315    2252 out.go:177] * Stopping node "ha-089400-m02"  ...
	I0408 19:17:15.005182    2252 machine.go:156] backing up vm config to /var/lib/minikube/backup: [/etc/cni /etc/kubernetes]
	I0408 19:17:15.023690    2252 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/backup
	I0408 19:17:15.023690    2252 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 19:17:17.275323    2252 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:17:17.275323    2252 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:17:17.275323    2252 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:17:19.955442    2252 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 19:17:19.955645    2252 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:17:19.955913    2252 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 19:17:20.076477    2252 ssh_runner.go:235] Completed: sudo mkdir -p /var/lib/minikube/backup: (5.0527479s)
	I0408 19:17:20.092159    2252 ssh_runner.go:195] Run: sudo rsync --archive --relative /etc/cni /var/lib/minikube/backup
	I0408 19:17:20.175737    2252 ssh_runner.go:195] Run: sudo rsync --archive --relative /etc/kubernetes /var/lib/minikube/backup
	I0408 19:17:20.247203    2252 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 19:17:22.484291    2252 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:17:22.484291    2252 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:17:22.487865    2252 out.go:177] * Powering off "ha-089400-m02" via SSH ...
	I0408 19:17:22.490330    2252 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 19:17:24.731842    2252 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:17:24.731842    2252 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:17:24.731971    2252 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]

                                                
                                                
** /stderr **
ha_test.go:367: secondary control-plane node stop returned an error. args "out/minikube-windows-amd64.exe -p ha-089400 node stop m02 -v=7 --alsologtostderr": exit status 1
ha_test.go:371: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr
ha_test.go:371: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr: context deadline exceeded (0s)
ha_test.go:374: failed to run minikube status. args "out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr" : context deadline exceeded
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p ha-089400 -n ha-089400
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p ha-089400 -n ha-089400: (12.6298453s)
helpers_test.go:244: <<< TestMultiControlPlane/serial/StopSecondaryNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiControlPlane/serial/StopSecondaryNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 logs -n 25: (9.0892318s)
helpers_test.go:252: TestMultiControlPlane/serial/StopSecondaryNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------------------------------------------------------------------------------|-----------|-------------------|---------|---------------------|---------------------|
	| Command |                                                           Args                                                            |  Profile  |       User        | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------------------------------------------------------------------------------|-----------|-------------------|---------|---------------------|---------------------|
	| cp      | ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:12 UTC | 08 Apr 25 19:12 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m03.txt |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:12 UTC | 08 Apr 25 19:12 UTC |
	|         | ha-089400-m03 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:12 UTC | 08 Apr 25 19:13 UTC |
	|         | ha-089400:/home/docker/cp-test_ha-089400-m03_ha-089400.txt                                                                |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:13 UTC | 08 Apr 25 19:13 UTC |
	|         | ha-089400-m03 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400 sudo cat                                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:13 UTC | 08 Apr 25 19:13 UTC |
	|         | /home/docker/cp-test_ha-089400-m03_ha-089400.txt                                                                          |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:13 UTC | 08 Apr 25 19:13 UTC |
	|         | ha-089400-m02:/home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt                                                        |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:13 UTC | 08 Apr 25 19:13 UTC |
	|         | ha-089400-m03 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400-m02 sudo cat                                                                                   | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:13 UTC | 08 Apr 25 19:14 UTC |
	|         | /home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt                                                                      |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:14 UTC | 08 Apr 25 19:14 UTC |
	|         | ha-089400-m04:/home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt                                                        |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:14 UTC | 08 Apr 25 19:14 UTC |
	|         | ha-089400-m03 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400-m04 sudo cat                                                                                   | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:14 UTC | 08 Apr 25 19:14 UTC |
	|         | /home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt                                                                      |           |                   |         |                     |                     |
	| cp      | ha-089400 cp testdata\cp-test.txt                                                                                         | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:14 UTC | 08 Apr 25 19:14 UTC |
	|         | ha-089400-m04:/home/docker/cp-test.txt                                                                                    |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:14 UTC | 08 Apr 25 19:15 UTC |
	|         | ha-089400-m04 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:15 UTC | 08 Apr 25 19:15 UTC |
	|         | C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m04.txt |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:15 UTC | 08 Apr 25 19:15 UTC |
	|         | ha-089400-m04 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:15 UTC | 08 Apr 25 19:15 UTC |
	|         | ha-089400:/home/docker/cp-test_ha-089400-m04_ha-089400.txt                                                                |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:15 UTC | 08 Apr 25 19:15 UTC |
	|         | ha-089400-m04 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400 sudo cat                                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:15 UTC | 08 Apr 25 19:16 UTC |
	|         | /home/docker/cp-test_ha-089400-m04_ha-089400.txt                                                                          |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:16 UTC | 08 Apr 25 19:16 UTC |
	|         | ha-089400-m02:/home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt                                                        |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:16 UTC | 08 Apr 25 19:16 UTC |
	|         | ha-089400-m04 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400-m02 sudo cat                                                                                   | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:16 UTC | 08 Apr 25 19:16 UTC |
	|         | /home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt                                                                      |           |                   |         |                     |                     |
	| cp      | ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt                                                                       | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:16 UTC | 08 Apr 25 19:16 UTC |
	|         | ha-089400-m03:/home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt                                                        |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n                                                                                                          | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:16 UTC | 08 Apr 25 19:17 UTC |
	|         | ha-089400-m04 sudo cat                                                                                                    |           |                   |         |                     |                     |
	|         | /home/docker/cp-test.txt                                                                                                  |           |                   |         |                     |                     |
	| ssh     | ha-089400 ssh -n ha-089400-m03 sudo cat                                                                                   | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:17 UTC | 08 Apr 25 19:17 UTC |
	|         | /home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt                                                                      |           |                   |         |                     |                     |
	| node    | ha-089400 node stop m02 -v=7                                                                                              | ha-089400 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:17 UTC |                     |
	|         | --alsologtostderr                                                                                                         |           |                   |         |                     |                     |
	|---------|---------------------------------------------------------------------------------------------------------------------------|-----------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 18:47:27
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 18:47:27.389938    9012 out.go:345] Setting OutFile to fd 1668 ...
	I0408 18:47:27.470893    9012 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:47:27.470961    9012 out.go:358] Setting ErrFile to fd 1060...
	I0408 18:47:27.470961    9012 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:47:27.491144    9012 out.go:352] Setting JSON to false
	I0408 18:47:27.495130    9012 start.go:129] hostinfo: {"hostname":"minikube3","uptime":98833,"bootTime":1744039214,"procs":178,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:47:27.495310    9012 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:47:27.501299    9012 out.go:177] * [ha-089400] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:47:27.505771    9012 notify.go:220] Checking for updates...
	I0408 18:47:27.506060    9012 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:47:27.508228    9012 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:47:27.511106    9012 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:47:27.514151    9012 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:47:27.516928    9012 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:47:27.520636    9012 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 18:47:32.886054    9012 out.go:177] * Using the hyperv driver based on user configuration
	I0408 18:47:32.890031    9012 start.go:297] selected driver: hyperv
	I0408 18:47:32.890031    9012 start.go:901] validating driver "hyperv" against <nil>
	I0408 18:47:32.890031    9012 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 18:47:32.938770    9012 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 18:47:32.940498    9012 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:47:32.940731    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:47:32.940731    9012 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 18:47:32.940731    9012 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 18:47:32.940863    9012 start.go:340] cluster config:
	{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker
CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthS
ock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:47:32.941292    9012 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 18:47:32.946232    9012 out.go:177] * Starting "ha-089400" primary control-plane node in "ha-089400" cluster
	I0408 18:47:32.948683    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:47:32.948880    9012 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 18:47:32.948951    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:47:32.949402    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:47:32.949432    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:47:32.949432    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:47:32.950460    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json: {Name:mk55ac39bf4944b017a16834787f25430b36f60e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:47:32.951496    9012 start.go:360] acquireMachinesLock for ha-089400: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:47:32.951496    9012 start.go:364] duration metric: took 0s to acquireMachinesLock for "ha-089400"
	I0408 18:47:32.951496    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID
:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:47:32.952123    9012 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 18:47:32.958880    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:47:32.958880    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:47:32.958880    9012 client.go:168] LocalClient.Create starting
	I0408 18:47:32.958880    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:47:32.959873    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:35.005039    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:47:36.766280    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:47:36.766280    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:36.767283    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:47:38.244880    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:47:38.245436    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:38.245436    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:47:41.873272    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:47:41.873272    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:41.876631    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:47:42.494633    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:47:42.735647    9012 main.go:141] libmachine: Creating VM...
	I0408 18:47:42.735647    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:47:45.673500    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:47:45.674094    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:45.674195    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:47:45.674260    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:47:47.467195    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:47:47.467239    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:47.467239    9012 main.go:141] libmachine: Creating VHD
	I0408 18:47:47.467334    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:47:51.283459    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 5407168E-05F8-4CF3-BBC7-354486FDC2A3
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:47:51.283577    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:51.283647    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:47:51.283647    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:47:51.299198    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:47:54.484434    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:47:54.484434    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:54.485009    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd' -SizeBytes 20000MB
	I0408 18:47:57.078641    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:47:57.079379    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:47:57.079466    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:48:00.792875    9012 main.go:141] libmachine: [stdout =====>] : 
	Name      State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----      ----- ----------- ----------------- ------   ------             -------
	ha-089400 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:48:00.792875    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:00.793874    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400 -DynamicMemoryEnabled $false
	I0408 18:48:03.041850    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:03.041850    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:03.042814    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400 -Count 2
	I0408 18:48:05.222593    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:05.222593    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:05.222721    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\boot2docker.iso'
	I0408 18:48:07.779036    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:07.779268    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:07.779343    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\disk.vhd'
	I0408 18:48:10.435215    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:10.435918    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:10.435918    9012 main.go:141] libmachine: Starting VM...
	I0408 18:48:10.435989    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:13.587424    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:48:13.587424    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:15.891090    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:15.891195    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:15.891230    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:18.472485    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:18.472485    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:19.473553    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:21.708758    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:21.709558    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:21.709616    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:24.274981    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:24.275051    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:25.276091    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:27.540349    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:30.056720    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:30.057027    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:31.058205    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:33.308936    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:35.846949    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:48:35.846949    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:36.847323    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:39.098368    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:39.098368    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:39.098898    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:41.725860    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:41.726081    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:41.726081    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:43.895113    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:43.895497    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:43.895497    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:48:43.895707    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:46.093331    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:46.094142    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:46.094142    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:48.656402    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:48.656402    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:48.663852    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:48.680228    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:48.680228    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:48:48.823952    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:48:48.824058    9012 buildroot.go:166] provisioning hostname "ha-089400"
	I0408 18:48:48.824058    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:51.028511    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:51.028511    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:51.029433    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:53.577231    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:53.577231    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:53.583493    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:53.584187    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:53.584187    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400 && echo "ha-089400" | sudo tee /etc/hostname
	I0408 18:48:53.750398    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400
	
	I0408 18:48:53.750398    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:48:55.931754    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:48:55.931754    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:55.932759    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:48:58.535826    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:48:58.536778    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:48:58.545702    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:48:58.546352    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:48:58.546352    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:48:58.699846    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:48:58.699927    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:48:58.699927    9012 buildroot.go:174] setting up certificates
	I0408 18:48:58.700018    9012 provision.go:84] configureAuth start
	I0408 18:48:58.700085    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:00.867466    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:00.867987    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:00.867987    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:03.452815    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:05.591010    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:05.592086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:05.592227    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:08.128168    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:08.128168    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:08.128168    9012 provision.go:143] copyHostCerts
	I0408 18:49:08.128562    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:49:08.128562    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:49:08.128562    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:49:08.129685    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:49:08.131183    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:49:08.131651    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:49:08.131651    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:49:08.132035    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:49:08.133093    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:49:08.133239    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:49:08.133239    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:49:08.133239    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:49:08.134841    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400 san=[127.0.0.1 172.22.47.59 ha-089400 localhost minikube]
	I0408 18:49:08.456027    9012 provision.go:177] copyRemoteCerts
	I0408 18:49:08.466786    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:49:08.466786    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:10.626994    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:10.627069    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:10.627069    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:13.170636    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:13.170865    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:13.171094    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:13.283620    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.8167959s)
	I0408 18:49:13.283620    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:49:13.284189    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:49:13.332377    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:49:13.333119    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1200 bytes)
	I0408 18:49:13.381101    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:49:13.381101    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0408 18:49:13.428784    9012 provision.go:87] duration metric: took 14.7286504s to configureAuth
	I0408 18:49:13.428894    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:49:13.429519    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:49:13.429519    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:15.586051    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:15.587045    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:15.587045    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:18.135339    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:18.136037    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:18.142374    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:18.143172    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:18.143172    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:49:18.291121    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:49:18.291204    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:49:18.291435    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:49:18.291529    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:20.437874    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:20.437874    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:20.438671    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:22.981428    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:22.981905    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:22.987045    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:22.987876    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:22.987876    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:49:23.153928    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this option.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:49:23.153928    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:25.272950    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:25.273112    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:25.273112    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:27.828587    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:27.828587    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:27.834490    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:27.835224    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:27.835376    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:49:30.064090    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:49:30.064090    9012 machine.go:96] duration metric: took 46.1682316s to provisionDockerMachine
	I0408 18:49:30.064090    9012 client.go:171] duration metric: took 1m57.1042998s to LocalClient.Create
	I0408 18:49:30.064090    9012 start.go:167] duration metric: took 1m57.1042998s to libmachine.API.Create "ha-089400"
	I0408 18:49:30.064385    9012 start.go:293] postStartSetup for "ha-089400" (driver="hyperv")
	I0408 18:49:30.064385    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:49:30.078741    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:49:30.078741    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:32.248301    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:32.248301    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:32.248860    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:34.800855    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:34.800855    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:34.801907    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:34.914977    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8361978s)
	I0408 18:49:34.926823    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:49:34.935556    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:49:34.935713    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:49:34.936489    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:49:34.938022    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:49:34.938117    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:49:34.951341    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:49:34.969357    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:49:35.014168    9012 start.go:296] duration metric: took 4.9497105s for postStartSetup
	I0408 18:49:35.017999    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:37.171114    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:37.172073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:37.172256    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:39.773538    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:39.773538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:39.774394    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:49:39.777377    9012 start.go:128] duration metric: took 2m6.8242664s to createHost
	I0408 18:49:39.777377    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:41.938801    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:41.938801    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:41.938964    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:44.499758    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:44.499758    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:44.506317    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:44.506550    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:44.506550    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:49:44.637495    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138184.642643337
	
	I0408 18:49:44.637663    9012 fix.go:216] guest clock: 1744138184.642643337
	I0408 18:49:44.637663    9012 fix.go:229] Guest: 2025-04-08 18:49:44.642643337 +0000 UTC Remote: 2025-04-08 18:49:39.7773774 +0000 UTC m=+132.483166301 (delta=4.865265937s)
	I0408 18:49:44.637998    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:46.791754    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:49.337665    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:49.337665    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:49.344623    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:49:49.344623    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.47.59 22 <nil> <nil>}
	I0408 18:49:49.344623    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138184
	I0408 18:49:49.501380    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:49:44 UTC 2025
	
	I0408 18:49:49.501380    9012 fix.go:236] clock set: Tue Apr  8 18:49:44 UTC 2025
	 (err=<nil>)
	I0408 18:49:49.501380    9012 start.go:83] releasing machines lock for "ha-089400", held for 2m16.5488195s
	I0408 18:49:49.502025    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:51.719308    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:51.720482    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:51.720579    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:54.254447    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:54.254447    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:54.259662    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:49:54.260310    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:54.272717    9012 ssh_runner.go:195] Run: cat /version.json
	I0408 18:49:54.272717    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:49:56.508785    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:56.509073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:49:59.226023    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:59.227038    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:59.227156    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:59.255668    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:49:59.256086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:49:59.256277    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:49:59.321952    9012 ssh_runner.go:235] Completed: cat /version.json: (5.0491947s)
	I0408 18:49:59.334260    9012 ssh_runner.go:195] Run: systemctl --version
	I0408 18:49:59.339399    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.0796961s)
	W0408 18:49:59.339399    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:49:59.354786    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	W0408 18:49:59.365178    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:49:59.375623    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:49:59.404462    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:49:59.404524    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:49:59.404524    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:49:59.452294    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 18:49:59.457495    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:49:59.457495    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:49:59.486350    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:49:59.507038    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:49:59.517686    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:49:59.547953    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:49:59.576688    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:49:59.609190    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:49:59.638716    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:49:59.668385    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:49:59.699189    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:49:59.729769    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:49:59.760311    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:49:59.780259    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:49:59.790262    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:49:59.827955    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:49:59.854821    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:00.057198    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:50:00.088370    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:50:00.098193    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:50:00.135125    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:50:00.167126    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:50:00.210084    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:50:00.244775    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:50:00.279654    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:50:00.341556    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:50:00.368587    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:50:00.414537    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:50:00.434810    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:50:00.453295    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:50:00.493338    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:50:00.692389    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:50:00.873048    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:50:00.873217    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:50:00.915246    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:01.101816    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:50:03.673068    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.571231s)
	I0408 18:50:03.684170    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:50:03.723824    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:50:03.765446    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:50:03.961805    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:50:04.169850    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:04.366467    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:50:04.408914    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:50:04.442222    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:04.633600    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:50:04.734114    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:50:04.745637    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:50:04.757091    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:50:04.768809    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:50:04.786302    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:50:04.839171    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:50:04.848578    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:50:04.890124    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:50:04.922882    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:50:04.923155    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:50:04.927893    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:50:04.930572    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:50:04.930572    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:50:04.941580    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:50:04.947683    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:50:04.980139    9012 kubeadm.go:883] updating cluster {Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespac
e:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mo
untUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 18:50:04.980139    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:50:04.988140    9012 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 18:50:05.013782    9012 docker.go:689] Got preloaded images: 
	I0408 18:50:05.013782    9012 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 18:50:05.026405    9012 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 18:50:05.056599    9012 ssh_runner.go:195] Run: which lz4
	I0408 18:50:05.064079    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 18:50:05.075283    9012 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 18:50:05.082136    9012 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 18:50:05.082136    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 18:50:06.857938    9012 docker.go:653] duration metric: took 1.7934577s to copy over tarball
	I0408 18:50:06.868970    9012 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 18:50:15.385073    9012 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (8.5150544s)
	I0408 18:50:15.385073    9012 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 18:50:15.447833    9012 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 18:50:15.470230    9012 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 18:50:15.514355    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:15.713001    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:50:18.866939    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1539127s)
	I0408 18:50:18.876869    9012 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 18:50:18.905603    9012 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 18:50:18.905680    9012 cache_images.go:84] Images are preloaded, skipping loading
	I0408 18:50:18.905744    9012 kubeadm.go:934] updating node { 172.22.47.59 8443 v1.32.2 docker true true} ...
	I0408 18:50:18.906272    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.47.59
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:50:18.918677    9012 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 18:50:18.984498    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:50:18.984546    9012 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 18:50:18.984546    9012 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 18:50:18.984546    9012 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.47.59 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:ha-089400 NodeName:ha-089400 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.47.59"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.47.59 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/etc/kubernetes/ma
nifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 18:50:18.984546    9012 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.47.59
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "ha-089400"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.47.59"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.47.59"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 18:50:18.984546    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:50:18.996646    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:50:19.021375    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:50:19.021646    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/super-admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:50:19.033800    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:50:19.048707    9012 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 18:50:19.059700    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube /etc/kubernetes/manifests
	I0408 18:50:19.078525    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (308 bytes)
	I0408 18:50:19.113741    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:50:19.142569    9012 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2287 bytes)
	I0408 18:50:19.171509    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1447 bytes)
	I0408 18:50:19.215137    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:50:19.221091    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:50:19.250736    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:50:19.429220    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:50:19.456418    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.47.59
	I0408 18:50:19.456418    9012 certs.go:194] generating shared ca certs ...
	I0408 18:50:19.456418    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:19.457222    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:50:19.457741    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:50:19.457986    9012 certs.go:256] generating profile certs ...
	I0408 18:50:19.458477    9012 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:50:19.458477    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt with IP's: []
	I0408 18:50:20.001097    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt ...
	I0408 18:50:20.001097    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.crt: {Name:mk091a720dfa0c60306c7ae51fa6699a7b88e9c7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.003307    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key ...
	I0408 18:50:20.003307    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key: {Name:mkfd1fda470bd79cdab55ffdf1b4b18ba5f62b7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.004976    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b
	I0408 18:50:20.004976    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.47.254]
	I0408 18:50:20.085546    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b ...
	I0408 18:50:20.085546    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b: {Name:mk01f3daa70ba5db3297bb03014cd9da2298fbac Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.086146    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b ...
	I0408 18:50:20.086146    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b: {Name:mk98c4e7ff1a4c18dc165d12fc3c5becc04d03d0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.087291    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.336ec62b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:50:20.105505    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.336ec62b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:50:20.107835    9012 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:50:20.108200    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt with IP's: []
	I0408 18:50:20.196099    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt ...
	I0408 18:50:20.197136    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt: {Name:mk77d14bfc20c02c360eb657f882d9f3b17479e3 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.198546    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key ...
	I0408 18:50:20.198546    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key: {Name:mkad206614b853ee3175ac3234f6ca59f16f8f18 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:20.199044    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:50:20.200088    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:50:20.200335    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:50:20.200521    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:50:20.212266    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:50:20.213367    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:50:20.213367    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:50:20.213367    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:50:20.214622    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:50:20.214951    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:50:20.215356    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:50:20.215668    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:50:20.216253    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.216301    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.216555    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:20.216782    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:50:20.264741    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:50:20.309146    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:50:20.354848    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:50:20.409399    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 18:50:20.462267    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:50:20.502704    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:50:20.554355    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:50:20.599417    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:50:20.644510    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:50:20.687024    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:50:20.730482    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 18:50:20.771269    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:50:20.790402    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:50:20.818848    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.826653    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.840467    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:50:20.859992    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:50:20.892550    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:50:20.924269    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.930952    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.939950    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:50:20.957703    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:50:20.987944    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:50:21.021785    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.029062    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.040161    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:50:21.065815    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:50:21.099444    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:50:21.105805    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:50:21.106098    9012 kubeadm.go:392] StartCluster: {Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:d
efault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mount
UID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:50:21.115691    9012 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 18:50:21.153086    9012 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 18:50:21.184578    9012 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 18:50:21.214637    9012 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 18:50:21.230918    9012 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 18:50:21.231008    9012 kubeadm.go:157] found existing configuration files:
	
	I0408 18:50:21.243090    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 18:50:21.267695    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 18:50:21.278822    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 18:50:21.313205    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 18:50:21.329095    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 18:50:21.340305    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 18:50:21.367959    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 18:50:21.384560    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 18:50:21.397334    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 18:50:21.428112    9012 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 18:50:21.446390    9012 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 18:50:21.458024    9012 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 18:50:21.475173    9012 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 18:50:21.935258    9012 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 18:50:36.267696    9012 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 18:50:36.267844    9012 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 18:50:36.268242    9012 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 18:50:36.268271    9012 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 18:50:36.268271    9012 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 18:50:36.268828    9012 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 18:50:36.271286    9012 out.go:235]   - Generating certificates and keys ...
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 18:50:36.272002    9012 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 18:50:36.272685    9012 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 18:50:36.272858    9012 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 18:50:36.273046    9012 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 18:50:36.273098    9012 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [ha-089400 localhost] and IPs [172.22.47.59 127.0.0.1 ::1]
	I0408 18:50:36.273098    9012 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 18:50:36.273665    9012 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [ha-089400 localhost] and IPs [172.22.47.59 127.0.0.1 ::1]
	I0408 18:50:36.273820    9012 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 18:50:36.273918    9012 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 18:50:36.273976    9012 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 18:50:36.273976    9012 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 18:50:36.274573    9012 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 18:50:36.274698    9012 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 18:50:36.274811    9012 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 18:50:36.274921    9012 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 18:50:36.274921    9012 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 18:50:36.280848    9012 out.go:235]   - Booting up control plane ...
	I0408 18:50:36.280848    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 18:50:36.281462    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 18:50:36.281462    9012 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 18:50:36.282025    9012 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 18:50:36.282229    9012 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 18:50:36.282278    9012 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 18:50:36.282278    9012 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 18:50:36.282797    9012 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.00197679s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 18:50:36.282874    9012 kubeadm.go:310] [api-check] The API server is healthy after 8.003425301s
	I0408 18:50:36.283479    9012 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 18:50:36.283479    9012 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 18:50:36.283479    9012 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 18:50:36.284084    9012 kubeadm.go:310] [mark-control-plane] Marking the node ha-089400 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 18:50:36.284620    9012 kubeadm.go:310] [bootstrap-token] Using token: ixg5xa.82wpmzozhmishmjw
	I0408 18:50:36.287677    9012 out.go:235]   - Configuring RBAC rules ...
	I0408 18:50:36.287853    9012 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 18:50:36.288049    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 18:50:36.288049    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 18:50:36.288713    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 18:50:36.288713    9012 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 18:50:36.289233    9012 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 18:50:36.289439    9012 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 18:50:36.289439    9012 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 18:50:36.289439    9012 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 18:50:36.289439    9012 kubeadm.go:310] 
	I0408 18:50:36.289439    9012 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 18:50:36.289969    9012 kubeadm.go:310] 
	I0408 18:50:36.290013    9012 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 18:50:36.290013    9012 kubeadm.go:310] 
	I0408 18:50:36.290013    9012 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 18:50:36.290013    9012 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 18:50:36.290013    9012 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 18:50:36.290532    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 18:50:36.290661    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 18:50:36.290661    9012 kubeadm.go:310] 
	I0408 18:50:36.290661    9012 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 18:50:36.291192    9012 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 18:50:36.291385    9012 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 18:50:36.291450    9012 kubeadm.go:310] 
	I0408 18:50:36.291450    9012 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 18:50:36.291450    9012 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 18:50:36.291975    9012 kubeadm.go:310] 
	I0408 18:50:36.292098    9012 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token ixg5xa.82wpmzozhmishmjw \
	I0408 18:50:36.292232    9012 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 18:50:36.292232    9012 kubeadm.go:310] 	--control-plane 
	I0408 18:50:36.292232    9012 kubeadm.go:310] 
	I0408 18:50:36.292232    9012 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 18:50:36.292232    9012 kubeadm.go:310] 
	I0408 18:50:36.292808    9012 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token ixg5xa.82wpmzozhmishmjw \
	I0408 18:50:36.292961    9012 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 18:50:36.292961    9012 cni.go:84] Creating CNI manager for ""
	I0408 18:50:36.292961    9012 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 18:50:36.297982    9012 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 18:50:36.312878    9012 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 18:50:36.319888    9012 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 18:50:36.319888    9012 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 18:50:36.368213    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 18:50:37.011649    9012 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 18:50:37.025776    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:37.025776    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400 minikube.k8s.io/updated_at=2025_04_08T18_50_37_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=true
	I0408 18:50:37.041765    9012 ops.go:34] apiserver oom_adj: -16
	I0408 18:50:37.256277    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:37.756146    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:38.255738    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:38.756244    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:39.255970    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:39.756636    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:40.253299    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 18:50:40.410128    9012 kubeadm.go:1113] duration metric: took 3.3982594s to wait for elevateKubeSystemPrivileges
	I0408 18:50:40.410128    9012 kubeadm.go:394] duration metric: took 19.3038762s to StartCluster
	I0408 18:50:40.410128    9012 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:40.410128    9012 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:50:40.412142    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:50:40.413135    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 18:50:40.413135    9012 start.go:233] HA (multi-control plane) cluster: will skip waiting for primary control-plane node &{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:50:40.413135    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:50:40.413135    9012 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 18:50:40.413135    9012 addons.go:69] Setting storage-provisioner=true in profile "ha-089400"
	I0408 18:50:40.413135    9012 addons.go:238] Setting addon storage-provisioner=true in "ha-089400"
	I0408 18:50:40.413135    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:50:40.413135    9012 addons.go:69] Setting default-storageclass=true in profile "ha-089400"
	I0408 18:50:40.414139    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:50:40.414139    9012 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "ha-089400"
	I0408 18:50:40.414139    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:40.415143    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:40.561974    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 18:50:40.931001    9012 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 18:50:42.810264    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:42.810804    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:42.812237    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:50:42.813438    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 18:50:42.814459    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:42.814459    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:42.816934    9012 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 18:50:42.816934    9012 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 18:50:42.816934    9012 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 18:50:42.817497    9012 addons.go:238] Setting addon default-storageclass=true in "ha-089400"
	I0408 18:50:42.817497    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:50:42.818473    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:42.820482    9012 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 18:50:42.820482    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 18:50:42.820482    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:45.323927    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:45.324004    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:45.324054    9012 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 18:50:45.324149    9012 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 18:50:45.324219    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:50:45.337850    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:45.337938    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:45.338100    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:50:47.696263    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:50:47.696404    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:47.696404    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:50:48.133507    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:50:48.134449    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:48.134714    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:50:48.293990    9012 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 18:50:50.388499    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:50:50.389442    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:50.389796    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:50:50.542944    9012 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 18:50:50.729333    9012 round_trippers.go:470] GET https://172.22.47.254:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 18:50:50.729430    9012 round_trippers.go:476] Request Headers:
	I0408 18:50:50.729430    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:50:50.729430    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:50:50.742651    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:50:50.743384    9012 round_trippers.go:470] PUT https://172.22.47.254:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 18:50:50.743437    9012 round_trippers.go:476] Request Headers:
	I0408 18:50:50.743437    9012 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 18:50:50.743437    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:50:50.743437    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:50:50.757119    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:50:50.760493    9012 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 18:50:50.763250    9012 addons.go:514] duration metric: took 10.350032s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 18:50:50.764232    9012 start.go:246] waiting for cluster config update ...
	I0408 18:50:50.764232    9012 start.go:255] writing updated cluster config ...
	I0408 18:50:50.766224    9012 out.go:201] 
	I0408 18:50:50.782463    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:50:50.782707    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:50:50.788640    9012 out.go:177] * Starting "ha-089400-m02" control-plane node in "ha-089400" cluster
	I0408 18:50:50.791459    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:50:50.791616    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:50:50.791994    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:50:50.791994    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:50:50.791994    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:50:50.800445    9012 start.go:360] acquireMachinesLock for ha-089400-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:50:50.800445    9012 start.go:364] duration metric: took 0s to acquireMachinesLock for "ha-089400-m02"
	I0408 18:50:50.800445    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mo
unt:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:50:50.801452    9012 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 18:50:50.805459    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:50:50.805459    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:50:50.805459    9012 client.go:168] LocalClient.Create starting
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:50:50.806448    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:50:50.807438    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:50:50.807438    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:50:50.807438    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:50:52.829281    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:50:52.829281    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:52.829572    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:50:54.574685    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:50:54.574685    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:54.575073    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:50:56.062297    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:50:56.062297    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:56.063134    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:50:59.740300    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:50:59.740494    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:50:59.743376    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:51:00.348807    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:51:00.699337    9012 main.go:141] libmachine: Creating VM...
	I0408 18:51:00.699337    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:51:03.778696    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:51:03.779083    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:03.779492    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:51:03.779568    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:05.615031    9012 main.go:141] libmachine: Creating VHD
	I0408 18:51:05.615031    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:51:09.566685    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : F69FC23E-1AA7-435D-8A7D-CA55C9612E7E
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:51:09.567538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:09.567622    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:51:09.567622    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:51:09.581618    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:51:12.834819    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:12.835843    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:12.835887    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd' -SizeBytes 20000MB
	I0408 18:51:15.455884    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:15.456251    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:15.456308    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:51:19.171199    9012 main.go:141] libmachine: [stdout =====>] : 
	Name          State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----          ----- ----------- ----------------- ------   ------             -------
	ha-089400-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:51:19.171294    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:19.171437    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400-m02 -DynamicMemoryEnabled $false
	I0408 18:51:21.471292    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:21.471711    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:21.471711    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400-m02 -Count 2
	I0408 18:51:23.741231    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:23.741231    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:23.741773    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\boot2docker.iso'
	I0408 18:51:26.337388    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:26.337388    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:26.338383    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\disk.vhd'
	I0408 18:51:29.039954    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:29.039954    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:29.040577    9012 main.go:141] libmachine: Starting VM...
	I0408 18:51:29.040577    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400-m02
	I0408 18:51:32.215515    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:32.215834    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:32.215834    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:51:32.215914    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:34.610050    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:34.610050    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:34.611104    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:37.194192    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:37.194455    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:38.194811    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:40.524583    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:40.524583    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:40.525378    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:43.081997    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:43.081997    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:44.083954    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:46.364842    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:46.364961    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:46.365041    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:48.933872    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:48.933872    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:49.934148    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:52.277229    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:52.277229    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:52.277922    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:51:54.870878    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:51:54.870878    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:55.871599    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:51:58.140324    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:51:58.140324    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:51:58.140758    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:00.779446    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:00.779446    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:00.780165    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:02.952796    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:02.953683    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:02.953683    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:52:02.953899    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:05.160496    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:05.160496    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:05.161192    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:07.800591    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:07.800591    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:07.807136    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:07.824300    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:07.824300    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:52:07.964932    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:52:07.965475    9012 buildroot.go:166] provisioning hostname "ha-089400-m02"
	I0408 18:52:07.965475    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:10.158538    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:10.158538    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:10.159403    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:12.742532    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:12.742532    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:12.749219    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:12.749540    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:12.749540    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400-m02 && echo "ha-089400-m02" | sudo tee /etc/hostname
	I0408 18:52:12.922643    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400-m02
	
	I0408 18:52:12.922830    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:15.084598    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:15.084598    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:15.085395    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:17.657739    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:17.657803    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:17.663672    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:17.664422    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:17.664422    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:52:17.820320    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:52:17.820320    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:52:17.820320    9012 buildroot.go:174] setting up certificates
	I0408 18:52:17.820854    9012 provision.go:84] configureAuth start
	I0408 18:52:17.820890    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:20.037843    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:20.038712    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:20.038712    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:22.690224    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:24.966902    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:24.967403    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:24.967468    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:27.637272    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:27.637272    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:27.637272    9012 provision.go:143] copyHostCerts
	I0408 18:52:27.637984    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:52:27.638347    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:52:27.638347    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:52:27.638347    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:52:27.639959    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:52:27.639959    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:52:27.639959    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:52:27.640630    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:52:27.641444    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:52:27.641444    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:52:27.641444    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:52:27.642069    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:52:27.643419    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400-m02 san=[127.0.0.1 172.22.34.212 ha-089400-m02 localhost minikube]
	I0408 18:52:27.975361    9012 provision.go:177] copyRemoteCerts
	I0408 18:52:27.986600    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:52:27.986600    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:30.225586    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:30.225586    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:30.225656    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:32.846192    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:32.846192    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:32.846824    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:52:32.960996    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (4.9743566s)
	I0408 18:52:32.961080    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:52:32.961171    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:52:33.010174    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:52:33.010174    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1208 bytes)
	I0408 18:52:33.060075    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:52:33.060075    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1675 bytes)
	I0408 18:52:33.114525    9012 provision.go:87] duration metric: took 15.2935482s to configureAuth
	I0408 18:52:33.114525    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:52:33.115172    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:52:33.115396    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:35.305726    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:35.305726    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:35.305916    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:37.916421    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:37.916421    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:37.921886    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:37.922775    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:37.922775    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:52:38.063175    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:52:38.063175    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:52:38.063175    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:52:38.063175    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:40.261159    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:40.261885    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:40.261967    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:42.883372    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:42.883372    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:42.889966    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:42.890668    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:42.890668    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.47.59"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:52:43.065595    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.47.59
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:52:43.066170    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:45.286392    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:45.286392    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:45.287289    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:47.895242    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:47.895242    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:47.902861    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:52:47.903624    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:52:47.903624    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:52:50.193404    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:52:50.193477    9012 machine.go:96] duration metric: took 47.2394153s to provisionDockerMachine
	I0408 18:52:50.193477    9012 client.go:171] duration metric: took 1m59.3870628s to LocalClient.Create
	I0408 18:52:50.193571    9012 start.go:167] duration metric: took 1m59.3870628s to libmachine.API.Create "ha-089400"
	I0408 18:52:50.193597    9012 start.go:293] postStartSetup for "ha-089400-m02" (driver="hyperv")
	I0408 18:52:50.193597    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:52:50.206370    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:52:50.206370    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:52.393299    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:54.966454    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:54.966877    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:54.967115    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:52:55.079170    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (4.8727163s)
	I0408 18:52:55.091782    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:52:55.098278    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:52:55.098442    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:52:55.098890    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:52:55.099859    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:52:55.099926    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:52:55.112480    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:52:55.130807    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:52:55.177794    9012 start.go:296] duration metric: took 4.9840997s for postStartSetup
	I0408 18:52:55.180992    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:52:57.331126    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:52:57.331613    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:57.331613    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:52:59.879853    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:52:59.879853    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:52:59.881016    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:52:59.883357    9012 start.go:128] duration metric: took 2m9.0808718s to createHost
	I0408 18:52:59.883499    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:02.058137    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:02.059154    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:02.059154    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:04.665681    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:04.666224    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:04.672236    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:53:04.673123    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:53:04.673123    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:53:04.807349    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138384.811839667
	
	I0408 18:53:04.807349    9012 fix.go:216] guest clock: 1744138384.811839667
	I0408 18:53:04.807349    9012 fix.go:229] Guest: 2025-04-08 18:53:04.811839667 +0000 UTC Remote: 2025-04-08 18:52:59.883499 +0000 UTC m=+332.587692001 (delta=4.928340667s)
	I0408 18:53:04.807444    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:06.996624    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:06.997557    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:06.997649    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:09.627263    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:09.628114    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:09.634268    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:53:09.635011    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.34.212 22 <nil> <nil>}
	I0408 18:53:09.635011    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138384
	I0408 18:53:09.796868    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:53:04 UTC 2025
	
	I0408 18:53:09.796868    9012 fix.go:236] clock set: Tue Apr  8 18:53:04 UTC 2025
	 (err=<nil>)
	I0408 18:53:09.796868    9012 start.go:83] releasing machines lock for "ha-089400-m02", held for 2m18.9953105s
	I0408 18:53:09.797172    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:12.024639    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:14.698789    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:14.698789    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:14.702776    9012 out.go:177] * Found network options:
	I0408 18:53:14.705848    9012 out.go:177]   - NO_PROXY=172.22.47.59
	W0408 18:53:14.708025    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:53:14.710106    9012 out.go:177]   - NO_PROXY=172.22.47.59
	W0408 18:53:14.713348    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:53:14.714576    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:53:14.717319    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:53:14.717319    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:14.726311    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 18:53:14.726311    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m02 ).state
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:16.995971    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:16.996744    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:16.996807    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:19.758008    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:19.758167    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:19.758167    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:53:19.784461    9012 main.go:141] libmachine: [stdout =====>] : 172.22.34.212
	
	I0408 18:53:19.784461    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:19.784669    9012 sshutil.go:53] new ssh client: &{IP:172.22.34.212 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m02\id_rsa Username:docker}
	I0408 18:53:19.851171    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1337293s)
	W0408 18:53:19.851171    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:53:19.886095    9012 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.1597421s)
	W0408 18:53:19.886095    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:53:19.898796    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:53:19.928255    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:53:19.928255    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:53:19.928478    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:53:19.975658    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 18:53:20.009295    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:53:20.030071    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	W0408 18:53:20.031054    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:53:20.031054    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:53:20.041542    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 18:53:20.075859    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:53:20.107245    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:53:20.140060    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:53:20.169880    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:53:20.203679    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:53:20.234319    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:53:20.263348    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:53:20.293136    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:53:20.310909    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:53:20.323813    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:53:20.358562    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:53:20.385889    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:20.602442    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:53:20.642238    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:53:20.654955    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:53:20.688363    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:53:20.720343    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:53:20.763341    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:53:20.799250    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:53:20.836038    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:53:20.893241    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:53:20.915790    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:53:20.966631    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:53:20.986380    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:53:21.002898    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:53:21.045328    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:53:21.248505    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:53:21.440253    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:53:21.440253    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:53:21.483982    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:21.669200    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:53:24.313290    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6440688s)
	I0408 18:53:24.327954    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:53:24.362931    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:53:24.401268    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:53:24.620390    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:53:24.831498    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:25.027238    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:53:25.070346    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:53:25.107147    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:25.309870    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:53:25.421414    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:53:25.434249    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:53:25.442491    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:53:25.454355    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:53:25.472376    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:53:25.530911    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:53:25.541418    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:53:25.591357    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:53:25.628428    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:53:25.631221    9012 out.go:177]   - env NO_PROXY=172.22.47.59
	I0408 18:53:25.634064    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:53:25.638327    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:53:25.641312    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:53:25.641312    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:53:25.650763    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:53:25.657404    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:53:25.678843    9012 mustload.go:65] Loading cluster: ha-089400
	I0408 18:53:25.679506    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:53:25.679705    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:27.857804    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:27.857804    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:27.858275    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:53:27.859100    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.34.212
	I0408 18:53:27.859100    9012 certs.go:194] generating shared ca certs ...
	I0408 18:53:27.859232    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:27.859621    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:53:27.860543    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:53:27.860775    9012 certs.go:256] generating profile certs ...
	I0408 18:53:27.861585    9012 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:53:27.861902    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4
	I0408 18:53:27.862062    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.34.212 172.22.47.254]
	I0408 18:53:28.151093    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 ...
	I0408 18:53:28.151093    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4: {Name:mke938a312a87a4d60be7a0c841b7b96f066f3e0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:28.152605    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4 ...
	I0408 18:53:28.152605    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4: {Name:mkaea1d23176606a41bfae74829fb46c1c1b82ca Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:53:28.153405    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.b41a13d4 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:53:28.174325    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.b41a13d4 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:53:28.175898    9012 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:53:28.176106    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:53:28.176240    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:53:28.176998    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:53:28.177354    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:53:28.177354    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:53:28.178439    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:53:28.178730    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:53:28.178893    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:53:28.178893    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:53:28.179793    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:53:28.179888    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:53:28.179952    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:53:28.179952    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:53:28.180816    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:53:28.180948    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:53:28.180948    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:53:28.181532    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:28.181887    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:53:28.182039    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:30.403804    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:30.404390    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:30.404390    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:33.000299    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:53:33.000299    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:33.000909    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:53:33.101584    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0408 18:53:33.110089    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0408 18:53:33.145086    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0408 18:53:33.151672    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0408 18:53:33.186697    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0408 18:53:33.193271    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0408 18:53:33.223666    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0408 18:53:33.229946    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0408 18:53:33.262262    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0408 18:53:33.270156    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0408 18:53:33.299929    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0408 18:53:33.306482    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0408 18:53:33.326586    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:53:33.374327    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:53:33.418580    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:53:33.466287    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:53:33.512485    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1436 bytes)
	I0408 18:53:33.559874    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:53:33.606637    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:53:33.659315    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:53:33.704637    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:53:33.752000    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:53:33.799479    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:53:33.847297    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0408 18:53:33.879497    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0408 18:53:33.911931    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0408 18:53:33.944293    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0408 18:53:33.975154    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0408 18:53:34.005701    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0408 18:53:34.035812    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0408 18:53:34.085951    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:53:34.107716    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:53:34.138099    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.145561    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.156715    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:53:34.180434    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:53:34.211793    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:53:34.245161    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.253685    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.264752    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:53:34.290058    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:53:34.324012    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:53:34.356307    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.363073    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.373709    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:53:34.396329    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:53:34.427012    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:53:34.434309    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:53:34.434309    9012 kubeadm.go:934] updating node {m02 172.22.34.212 8443 v1.32.2 docker true true} ...
	I0408 18:53:34.434855    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400-m02 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.34.212
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:53:34.434972    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:53:34.446276    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:53:34.471377    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:53:34.471521    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name : lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:53:34.482250    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:53:34.506426    9012 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.32.2: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.32.2': No such file or directory
	
	Initiating transfer...
	I0408 18:53:34.519007    9012 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.32.2
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm
	I0408 18:53:34.546765    9012 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl
	I0408 18:53:35.661266    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl -> /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:53:35.686805    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:53:35.693858    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubectl': No such file or directory
	I0408 18:53:35.693858    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl --> /var/lib/minikube/binaries/v1.32.2/kubectl (57323672 bytes)
	I0408 18:53:35.942804    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm -> /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:53:35.955081    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:53:35.971474    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubeadm: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubeadm': No such file or directory
	I0408 18:53:35.971474    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm --> /var/lib/minikube/binaries/v1.32.2/kubeadm (70942872 bytes)
	I0408 18:53:36.225260    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:53:36.303715    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet -> /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:53:36.314355    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:53:36.330893    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubelet: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubelet': No such file or directory
	I0408 18:53:36.330893    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet --> /var/lib/minikube/binaries/v1.32.2/kubelet (77406468 bytes)
	I0408 18:53:37.047075    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0408 18:53:37.073721    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (313 bytes)
	I0408 18:53:37.108848    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:53:37.140025    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0408 18:53:37.191089    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:53:37.200878    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:53:37.234891    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:53:37.431447    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:53:37.460745    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:53:37.461874    9012 start.go:317] joinCluster: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:def
ault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false M
ountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:53:37.462114    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0408 18:53:37.462114    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:53:39.662992    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:53:39.662992    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:39.664017    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:53:42.347902    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:53:42.348677    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:53:42.348677    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:53:42.859972    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0": (5.3977347s)
	I0408 18:53:42.860224    9012 start.go:343] trying to join control-plane node "m02" to cluster: &{Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:53:42.860309    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token kqgs6j.q4ws2792ks88b89m --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m02 --control-plane --apiserver-advertise-address=172.22.34.212 --apiserver-bind-port=8443"
	I0408 18:54:23.022307    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token kqgs6j.q4ws2792ks88b89m --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m02 --control-plane --apiserver-advertise-address=172.22.34.212 --apiserver-bind-port=8443": (40.1616065s)
	I0408 18:54:23.022430    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0408 18:54:23.827125    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400-m02 minikube.k8s.io/updated_at=2025_04_08T18_54_23_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=false
	I0408 18:54:24.006525    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-089400-m02 node-role.kubernetes.io/control-plane:NoSchedule-
	I0408 18:54:24.177293    9012 start.go:319] duration metric: took 46.7150453s to joinCluster
	I0408 18:54:24.177500    9012 start.go:235] Will wait 6m0s for node &{Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:54:24.178359    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:54:24.182100    9012 out.go:177] * Verifying Kubernetes components...
	I0408 18:54:24.195483    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:54:24.557638    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:54:24.587442    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:54:24.588237    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0408 18:54:24.588237    9012 kubeadm.go:483] Overriding stale ClientConfig host https://172.22.47.254:8443 with https://172.22.47.59:8443
	I0408 18:54:24.589260    9012 node_ready.go:35] waiting up to 6m0s for node "ha-089400-m02" to be "Ready" ...
	I0408 18:54:24.589260    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:24.589260    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:24.589260    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:24.589260    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:24.611608    9012 round_trippers.go:581] Response Status: 200 OK in 22 milliseconds
	I0408 18:54:25.090289    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:25.090289    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:25.090289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:25.090289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:25.104766    9012 round_trippers.go:581] Response Status: 200 OK in 14 milliseconds
	I0408 18:54:25.589441    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:25.589441    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:25.589441    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:25.589441    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:25.598744    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:26.092857    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:26.092857    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:26.092857    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:26.092857    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:26.099814    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:26.590872    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:26.590872    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:26.590872    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:26.590872    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:26.595446    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:26.596484    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:27.090561    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:27.090561    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:27.090561    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:27.090561    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:27.097508    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:27.589624    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:27.589624    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:27.589624    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:27.589624    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:27.603086    9012 round_trippers.go:581] Response Status: 200 OK in 13 milliseconds
	I0408 18:54:28.090321    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:28.090321    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:28.090321    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:28.090321    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:28.099946    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:28.589628    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:28.589628    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:28.589628    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:28.589628    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:28.881039    9012 round_trippers.go:581] Response Status: 200 OK in 291 milliseconds
	I0408 18:54:28.881670    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:29.089605    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:29.090080    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:29.090080    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:29.090080    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:29.105866    9012 round_trippers.go:581] Response Status: 200 OK in 15 milliseconds
	I0408 18:54:29.589760    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:29.589760    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:29.589760    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:29.589760    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:29.594937    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:30.090212    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:30.090212    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:30.090212    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:30.090212    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:30.130855    9012 round_trippers.go:581] Response Status: 200 OK in 40 milliseconds
	I0408 18:54:30.590698    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:30.590698    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:30.590698    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:30.590698    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:30.595524    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:31.090135    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:31.090135    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:31.090135    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:31.090135    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:31.095959    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:31.096190    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:31.589728    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:31.590182    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:31.590289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:31.590289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:31.597113    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:32.089598    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:32.089598    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:32.089598    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:32.089598    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:32.096441    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:32.590029    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:32.590029    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:32.590083    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:32.590083    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:32.595155    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:33.090794    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:33.090874    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:33.090874    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:33.090874    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:33.099681    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:54:33.100480    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:33.590768    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:33.590816    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:33.590816    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:33.590877    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:33.595389    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:34.089983    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:34.090068    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:34.090068    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:34.090068    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:34.096670    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:34.590231    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:34.590231    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:34.590231    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:34.590231    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:34.595061    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:35.089973    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:35.089973    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:35.089973    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:35.089973    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:35.095889    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:35.590183    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:35.590183    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:35.590183    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:35.590183    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:35.595765    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:35.595999    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:36.089996    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:36.089996    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:36.089996    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:36.089996    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:36.095798    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:36.589484    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:36.589484    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:36.589484    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:36.589484    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:36.596471    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:37.090157    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:37.090232    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:37.090232    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:37.090232    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:37.095769    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:37.590533    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:37.590533    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:37.590533    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:37.590533    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:37.598562    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:37.599073    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:38.089788    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:38.089788    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:38.089788    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:38.089788    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:38.095397    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:38.589721    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:38.589721    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:38.589721    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:38.589721    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:38.595079    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:39.090103    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:39.090103    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:39.090103    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:39.090544    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:39.097202    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:39.591040    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:39.591114    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:39.591114    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:39.591114    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:39.600507    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:39.601066    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:40.090430    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:40.090430    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:40.090430    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:40.090430    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:40.096432    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:40.590886    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:40.590886    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:40.590886    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:40.590979    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:40.596101    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:41.090077    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:41.090077    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:41.090077    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:41.090077    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:41.098578    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:54:41.589430    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:41.590240    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:41.590240    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:41.590240    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:41.597360    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:42.091166    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:42.091237    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:42.091237    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:42.091237    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:42.097727    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:42.097727    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:42.589948    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:42.589948    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:42.589948    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:42.589948    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:42.595105    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:43.090478    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:43.090564    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:43.090640    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:43.090640    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:43.099930    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:54:43.590383    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:43.590383    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:43.590383    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:43.590383    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:43.596247    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:44.090544    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:44.090544    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:44.090544    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:44.090544    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:44.097507    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:44.097905    9012 node_ready.go:53] node "ha-089400-m02" has status "Ready":"False"
	I0408 18:54:44.590903    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:44.590903    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:44.590903    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:44.591007    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:44.595808    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.090138    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.090260    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.090260    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.090260    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.095482    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:45.589489    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.589489    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.589489    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.589489    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.608446    9012 round_trippers.go:581] Response Status: 200 OK in 18 milliseconds
	I0408 18:54:45.608875    9012 node_ready.go:49] node "ha-089400-m02" has status "Ready":"True"
	I0408 18:54:45.608875    9012 node_ready.go:38] duration metric: took 21.0194473s for node "ha-089400-m02" to be "Ready" ...
	I0408 18:54:45.608972    9012 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:54:45.609103    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:45.609103    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.609103    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.609103    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.613715    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.616450    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.616978    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7f57d
	I0408 18:54:45.616978    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.616978    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.616978    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.621824    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.623363    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.623422    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.623467    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.623467    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.627217    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.627217    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.627217    9012 pod_ready.go:82] duration metric: took 10.7675ms for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.627217    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.627217    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7v5zn
	I0408 18:54:45.627217    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.627217    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.627217    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.631872    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.632852    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.632907    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.632907    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.632907    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.640893    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:45.641232    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.641306    9012 pod_ready.go:82] duration metric: took 14.0887ms for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.641306    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.641473    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400
	I0408 18:54:45.641473    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.641575    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.641575    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.645693    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:45.646561    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.646619    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.646619    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.646619    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.650348    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.650976    9012 pod_ready.go:93] pod "etcd-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.650976    9012 pod_ready.go:82] duration metric: took 9.583ms for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.651035    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.651131    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m02
	I0408 18:54:45.651131    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.651131    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.651208    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.655317    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.655911    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:45.655969    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.655969    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.655969    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.659487    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:54:45.659487    9012 pod_ready.go:93] pod "etcd-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.659487    9012 pod_ready.go:82] duration metric: took 8.4518ms for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.659487    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.790439    9012 request.go:661] Waited for 130.9515ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:54:45.790439    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:54:45.790439    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.790439    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.790439    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.796569    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:45.989803    9012 request.go:661] Waited for 192.5668ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.989803    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:45.989803    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:45.989803    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:45.989803    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:45.995353    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:45.995823    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:45.995851    9012 pod_ready.go:82] duration metric: took 336.3616ms for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:45.995899    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.190573    9012 request.go:661] Waited for 194.6308ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:54:46.190573    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:54:46.190573    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.190573    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.190573    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.197272    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:46.389964    9012 request.go:661] Waited for 192.1505ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:46.390524    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:46.390524    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.390524    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.390524    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.396007    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:46.396573    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:46.396573    9012 pod_ready.go:82] duration metric: took 400.6704ms for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.396573    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.590111    9012 request.go:661] Waited for 193.4139ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:54:46.590111    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:54:46.590111    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.590111    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.590111    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.596242    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:46.789598    9012 request.go:661] Waited for 192.9162ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:46.789598    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:46.789598    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.789598    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.789598    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.795177    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:46.795498    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:46.795570    9012 pod_ready.go:82] duration metric: took 398.9944ms for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.795570    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:46.989564    9012 request.go:661] Waited for 193.8921ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:54:46.989564    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:54:46.989564    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:46.989564    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:46.989564    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:46.995000    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.190059    9012 request.go:661] Waited for 194.6675ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.190059    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.190059    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.190059    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.190059    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.195664    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.196609    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.196665    9012 pod_ready.go:82] duration metric: took 401.092ms for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.196738    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.390057    9012 request.go:661] Waited for 193.2515ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:54:47.390555    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:54:47.390593    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.390593    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.390653    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.395742    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:47.589928    9012 request.go:661] Waited for 193.6789ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.589928    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:47.589928    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.589928    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.589928    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.596408    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:47.596946    9012 pod_ready.go:93] pod "kube-proxy-c4hjd" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.597013    9012 pod_ready.go:82] duration metric: took 400.2716ms for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.597013    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.790222    9012 request.go:661] Waited for 193.0608ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:54:47.790222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:54:47.790222    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.790222    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.790222    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.794932    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:54:47.990470    9012 request.go:661] Waited for 195.0792ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:47.990470    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:47.990470    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:47.990470    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:47.990470    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:47.996802    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:47.997277    9012 pod_ready.go:93] pod "kube-proxy-gf6wz" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:47.997277    9012 pod_ready.go:82] duration metric: took 400.2606ms for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:47.997394    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.190530    9012 request.go:661] Waited for 193.0561ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:54:48.191093    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:54:48.191093    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.191093    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.191093    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.196270    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:48.389627    9012 request.go:661] Waited for 192.8356ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:48.389627    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:54:48.389627    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.389627    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.389627    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.396969    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:48.397434    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:48.397509    9012 pod_ready.go:82] duration metric: took 400.1122ms for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.397509    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.590287    9012 request.go:661] Waited for 192.6414ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:54:48.590287    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:54:48.590287    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.590287    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.590287    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.597461    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:48.790108    9012 request.go:661] Waited for 192.2464ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:48.790108    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:54:48.790108    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.790108    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.790108    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.796253    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:48.797464    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:54:48.797558    9012 pod_ready.go:82] duration metric: took 400.0455ms for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:54:48.797558    9012 pod_ready.go:39] duration metric: took 3.1885608s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:54:48.797734    9012 api_server.go:52] waiting for apiserver process to appear ...
	I0408 18:54:48.808921    9012 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 18:54:48.837674    9012 api_server.go:72] duration metric: took 24.6597963s to wait for apiserver process to appear ...
	I0408 18:54:48.837766    9012 api_server.go:88] waiting for apiserver healthz status ...
	I0408 18:54:48.837832    9012 api_server.go:253] Checking apiserver healthz at https://172.22.47.59:8443/healthz ...
	I0408 18:54:48.854654    9012 api_server.go:279] https://172.22.47.59:8443/healthz returned 200:
	ok
	I0408 18:54:48.854820    9012 round_trippers.go:470] GET https://172.22.47.59:8443/version
	I0408 18:54:48.854820    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.854820    9012 round_trippers.go:480]     Accept: application/json, */*
	I0408 18:54:48.854820    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.856697    9012 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0408 18:54:48.856885    9012 api_server.go:141] control plane version: v1.32.2
	I0408 18:54:48.856885    9012 api_server.go:131] duration metric: took 19.1193ms to wait for apiserver health ...
	I0408 18:54:48.856954    9012 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 18:54:48.990014    9012 request.go:661] Waited for 133.0596ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:48.990014    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:48.990014    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:48.990014    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:48.990014    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:48.996971    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:48.999900    9012 system_pods.go:59] 17 kube-system pods found
	I0408 18:54:49.000002    9012 system_pods.go:61] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:54:49.000002    9012 system_pods.go:61] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:54:49.000002    9012 system_pods.go:61] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:54:49.000127    9012 system_pods.go:61] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:54:49.000251    9012 system_pods.go:61] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:54:49.000413    9012 system_pods.go:61] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:54:49.000479    9012 system_pods.go:61] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:54:49.000536    9012 system_pods.go:74] duration metric: took 143.5809ms to wait for pod list to return data ...
	I0408 18:54:49.000536    9012 default_sa.go:34] waiting for default service account to be created ...
	I0408 18:54:49.190030    9012 request.go:661] Waited for 189.261ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:54:49.190030    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:54:49.190030    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.190030    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.190030    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.196049    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:54:49.196390    9012 default_sa.go:45] found service account: "default"
	I0408 18:54:49.196390    9012 default_sa.go:55] duration metric: took 195.8525ms for default service account to be created ...
	I0408 18:54:49.196390    9012 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 18:54:49.390214    9012 request.go:661] Waited for 193.6791ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:49.390214    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:54:49.390214    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.390214    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.390214    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.395692    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:54:49.398072    9012 system_pods.go:86] 17 kube-system pods found
	I0408 18:54:49.398072    9012 system_pods.go:89] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:54:49.398139    9012 system_pods.go:89] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:54:49.398206    9012 system_pods.go:89] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:54:49.398206    9012 system_pods.go:126] duration metric: took 201.8142ms to wait for k8s-apps to be running ...
	I0408 18:54:49.398275    9012 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 18:54:49.409314    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:54:49.434819    9012 system_svc.go:56] duration metric: took 36.5443ms WaitForService to wait for kubelet
	I0408 18:54:49.434900    9012 kubeadm.go:582] duration metric: took 25.2570168s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:54:49.434945    9012 node_conditions.go:102] verifying NodePressure condition ...
	I0408 18:54:49.590332    9012 request.go:661] Waited for 155.3278ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes
	I0408 18:54:49.590332    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes
	I0408 18:54:49.590332    9012 round_trippers.go:476] Request Headers:
	I0408 18:54:49.590332    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:54:49.590332    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:54:49.597343    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:54:49.597343    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:54:49.597343    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:54:49.597343    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:54:49.597343    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:54:49.597343    9012 node_conditions.go:105] duration metric: took 162.3966ms to run NodePressure ...
	I0408 18:54:49.597343    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:54:49.597343    9012 start.go:255] writing updated cluster config ...
	I0408 18:54:49.601343    9012 out.go:201] 
	I0408 18:54:49.628374    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:54:49.628732    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:54:49.639624    9012 out.go:177] * Starting "ha-089400-m03" control-plane node in "ha-089400" cluster
	I0408 18:54:49.644132    9012 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 18:54:49.644576    9012 cache.go:56] Caching tarball of preloaded images
	I0408 18:54:49.644794    9012 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 18:54:49.645069    9012 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 18:54:49.645280    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:54:49.654517    9012 start.go:360] acquireMachinesLock for ha-089400-m03: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 18:54:49.655372    9012 start.go:364] duration metric: took 294.7µs to acquireMachinesLock for "ha-089400-m03"
	I0408 18:54:49.655372    9012 start.go:93] Provisioning new machine with config: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName
:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m03 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false insp
ektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror:
DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m03 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:54:49.655372    9012 start.go:125] createHost starting for "m03" (driver="hyperv")
	I0408 18:54:49.659242    9012 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 18:54:49.659242    9012 start.go:159] libmachine.API.Create for "ha-089400" (driver="hyperv")
	I0408 18:54:49.660234    9012 client.go:168] LocalClient.Create starting
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:54:49.660234    9012 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 18:54:49.661237    9012 main.go:141] libmachine: Decoding PEM data...
	I0408 18:54:49.661237    9012 main.go:141] libmachine: Parsing certificate...
	I0408 18:54:49.661237    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:51.653417    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 18:54:53.454949    9012 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 18:54:53.455426    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:53.455522    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:54:54.994058    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:54:54.994058    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:54.994659    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:54:58.885321    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:54:58.885610    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:54:58.887874    9012 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 18:54:59.381732    9012 main.go:141] libmachine: Creating SSH key...
	I0408 18:54:59.981263    9012 main.go:141] libmachine: Creating VM...
	I0408 18:54:59.981263    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 18:55:02.984454    9012 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 18:55:02.984907    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:02.984907    9012 main.go:141] libmachine: Using switch "Default Switch"
	I0408 18:55:02.984907    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 18:55:04.789345    9012 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 18:55:04.789647    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:04.789647    9012 main.go:141] libmachine: Creating VHD
	I0408 18:55:04.789740    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 18:55:08.599487    9012 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 92E34E2C-7C02-4340-AA80-28FCC82AAE37
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 18:55:08.599628    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:08.599628    9012 main.go:141] libmachine: Writing magic tar header
	I0408 18:55:08.599628    9012 main.go:141] libmachine: Writing SSH key tar header
	I0408 18:55:08.613873    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 18:55:11.860797    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:11.860797    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:11.861881    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd' -SizeBytes 20000MB
	I0408 18:55:14.473440    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:14.474496    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:14.474551    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 18:55:18.203349    9012 main.go:141] libmachine: [stdout =====>] : 
	Name          State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----          ----- ----------- ----------------- ------   ------             -------
	ha-089400-m03 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 18:55:18.203522    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:18.203522    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName ha-089400-m03 -DynamicMemoryEnabled $false
	I0408 18:55:20.572148    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:20.572303    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:20.572303    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor ha-089400-m03 -Count 2
	I0408 18:55:22.816261    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:22.816261    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:22.817014    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\boot2docker.iso'
	I0408 18:55:25.486808    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:25.486808    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:25.486938    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName ha-089400-m03 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\disk.vhd'
	I0408 18:55:28.228207    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:28.228699    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:28.228699    9012 main.go:141] libmachine: Starting VM...
	I0408 18:55:28.228699    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM ha-089400-m03
	I0408 18:55:31.446225    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:31.446225    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:31.446225    9012 main.go:141] libmachine: Waiting for host to start...
	I0408 18:55:31.446939    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:33.795918    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:33.795918    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:33.796775    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:36.382194    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:36.382285    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:37.383291    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:39.660623    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:39.660623    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:39.660764    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:42.327572    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:42.328082    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:43.328946    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:45.622469    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:45.622469    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:45.623158    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:48.239837    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:48.239837    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:49.240763    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:51.579977    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:51.580719    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:51.580719    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:55:54.183684    9012 main.go:141] libmachine: [stdout =====>] : 
	I0408 18:55:54.183684    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:55.184757    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:55:57.505250    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:00.167685    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:00.167685    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:00.168199    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:02.364246    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:02.364718    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:02.364718    9012 machine.go:93] provisionDockerMachine start ...
	I0408 18:56:02.364817    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:04.585267    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:04.585649    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:04.585737    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:07.201924    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:07.202421    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:07.207326    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:07.208443    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:07.208524    9012 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 18:56:07.338977    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 18:56:07.339039    9012 buildroot.go:166] provisioning hostname "ha-089400-m03"
	I0408 18:56:07.339110    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:09.574288    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:12.198453    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:12.198816    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:12.204668    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:12.205304    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:12.205304    9012 main.go:141] libmachine: About to run SSH command:
	sudo hostname ha-089400-m03 && echo "ha-089400-m03" | sudo tee /etc/hostname
	I0408 18:56:12.368874    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: ha-089400-m03
	
	I0408 18:56:12.369426    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:14.557978    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:14.557978    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:14.559040    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:17.207842    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:17.207842    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:17.214498    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:17.215031    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:17.215031    9012 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\sha-089400-m03' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 ha-089400-m03/g' /etc/hosts;
				else 
					echo '127.0.1.1 ha-089400-m03' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 18:56:17.349077    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 18:56:17.349077    9012 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 18:56:17.349077    9012 buildroot.go:174] setting up certificates
	I0408 18:56:17.349077    9012 provision.go:84] configureAuth start
	I0408 18:56:17.349077    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:19.547597    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:19.547597    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:19.548399    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:22.182166    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:24.366409    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:24.366409    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:24.367159    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:26.998174    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:26.998903    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:26.998903    9012 provision.go:143] copyHostCerts
	I0408 18:56:26.998903    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 18:56:26.998903    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 18:56:26.999514    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 18:56:26.999840    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 18:56:27.001714    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 18:56:27.001714    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 18:56:27.001714    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 18:56:27.002612    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 18:56:27.003736    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 18:56:27.003736    9012 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 18:56:27.004257    9012 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 18:56:27.004561    9012 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 18:56:27.005991    9012 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.ha-089400-m03 san=[127.0.0.1 172.22.44.49 ha-089400-m03 localhost minikube]
	I0408 18:56:27.342248    9012 provision.go:177] copyRemoteCerts
	I0408 18:56:27.353317    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 18:56:27.353317    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:29.585019    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:29.585079    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:29.585079    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:32.253339    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:32.253499    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:32.253681    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:56:32.361404    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0080465s)
	I0408 18:56:32.361495    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 18:56:32.362055    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1208 bytes)
	I0408 18:56:32.409401    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 18:56:32.409848    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 18:56:32.465286    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 18:56:32.465331    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 18:56:32.518700    9012 provision.go:87] duration metric: took 15.1695014s to configureAuth
	I0408 18:56:32.518700    9012 buildroot.go:189] setting minikube options for container-runtime
	I0408 18:56:32.519677    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:56:32.519677    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:34.712907    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:34.713767    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:34.713767    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:37.328489    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:37.328489    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:37.337772    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:37.337772    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:37.337772    9012 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 18:56:37.469355    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 18:56:37.469450    9012 buildroot.go:70] root file system type: tmpfs
	I0408 18:56:37.469653    9012 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 18:56:37.469653    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:39.710086    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:39.710086    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:39.710880    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:42.351298    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:42.351298    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:42.356476    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:42.357177    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:42.357177    9012 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.47.59"
	Environment="NO_PROXY=172.22.47.59,172.22.34.212"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 18:56:42.511048    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.47.59
	Environment=NO_PROXY=172.22.47.59,172.22.34.212
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 18:56:42.511184    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:44.722275    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:44.722275    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:44.722448    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:47.340986    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:47.341166    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:47.346907    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:56:47.347594    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:56:47.347692    9012 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 18:56:49.606889    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 18:56:49.606889    9012 machine.go:96] duration metric: took 47.2417942s to provisionDockerMachine
	I0408 18:56:49.606889    9012 client.go:171] duration metric: took 1m59.9456973s to LocalClient.Create
	I0408 18:56:49.606889    9012 start.go:167] duration metric: took 1m59.9466885s to libmachine.API.Create "ha-089400"
	I0408 18:56:49.606889    9012 start.go:293] postStartSetup for "ha-089400-m03" (driver="hyperv")
	I0408 18:56:49.606889    9012 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 18:56:49.617885    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 18:56:49.617885    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:51.878845    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:51.878845    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:51.879642    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:54.536473    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:54.536473    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:54.536473    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:56:54.658672    9012 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0407475s)
	I0408 18:56:54.671194    9012 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 18:56:54.677972    9012 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 18:56:54.677972    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 18:56:54.677972    9012 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 18:56:54.678627    9012 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 18:56:54.678627    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 18:56:54.690343    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 18:56:54.710088    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 18:56:54.756799    9012 start.go:296] duration metric: took 5.1498693s for postStartSetup
	I0408 18:56:54.760294    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:56:56.985684    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:56:56.985684    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:56.986556    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:56:59.645047    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:56:59.645047    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:56:59.645659    9012 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\config.json ...
	I0408 18:56:59.648173    9012 start.go:128] duration metric: took 2m9.9917629s to createHost
	I0408 18:56:59.648332    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:01.873316    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:01.873316    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:01.874193    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:04.481493    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:04.481720    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:04.490469    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:57:04.490469    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:57:04.490469    9012 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 18:57:04.620763    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744138624.626207453
	
	I0408 18:57:04.620867    9012 fix.go:216] guest clock: 1744138624.626207453
	I0408 18:57:04.620867    9012 fix.go:229] Guest: 2025-04-08 18:57:04.626207453 +0000 UTC Remote: 2025-04-08 18:56:59.6482541 +0000 UTC m=+572.350531301 (delta=4.977953353s)
	I0408 18:57:04.620867    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:06.862933    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:09.456551    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:09.457024    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:09.463156    9012 main.go:141] libmachine: Using SSH client type: native
	I0408 18:57:09.463246    9012 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.44.49 22 <nil> <nil>}
	I0408 18:57:09.463246    9012 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744138624
	I0408 18:57:09.606355    9012 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 18:57:04 UTC 2025
	
	I0408 18:57:09.606413    9012 fix.go:236] clock set: Tue Apr  8 18:57:04 UTC 2025
	 (err=<nil>)
	I0408 18:57:09.606413    9012 start.go:83] releasing machines lock for "ha-089400-m03", held for 2m19.9499241s
	I0408 18:57:09.606695    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:11.791247    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:11.791247    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:11.791962    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:14.417713    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:14.418406    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:14.421857    9012 out.go:177] * Found network options:
	I0408 18:57:14.424668    9012 out.go:177]   - NO_PROXY=172.22.47.59,172.22.34.212
	W0408 18:57:14.428989    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.428989    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:57:14.431299    9012 out.go:177]   - NO_PROXY=172.22.47.59,172.22.34.212
	W0408 18:57:14.433514    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.433514    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.434759    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 18:57:14.434759    9012 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 18:57:14.436661    9012 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 18:57:14.436661    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:14.445998    9012 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 18:57:14.445998    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400-m03 ).state
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:16.723423    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:16.737137    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:19.497145    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:19.497221    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:19.497317    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:57:19.529961    9012 main.go:141] libmachine: [stdout =====>] : 172.22.44.49
	
	I0408 18:57:19.529961    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:19.529961    9012 sshutil.go:53] new ssh client: &{IP:172.22.44.49 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400-m03\id_rsa Username:docker}
	I0408 18:57:19.590254    9012 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.1442155s)
	W0408 18:57:19.590314    9012 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 18:57:19.605254    9012 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 18:57:19.610725    9012 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1740233s)
	W0408 18:57:19.610725    9012 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 18:57:19.639489    9012 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 18:57:19.639489    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:57:19.639489    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:57:19.690681    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 18:57:19.723673    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 18:57:19.746185    9012 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 18:57:19.757685    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	W0408 18:57:19.771977    9012 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 18:57:19.771977    9012 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 18:57:19.790985    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:57:19.821175    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 18:57:19.856008    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 18:57:19.888368    9012 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 18:57:19.919485    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 18:57:19.954925    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 18:57:19.987391    9012 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 18:57:20.018411    9012 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 18:57:20.036318    9012 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 18:57:20.053328    9012 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 18:57:20.086862    9012 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 18:57:20.118234    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:20.338038    9012 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 18:57:20.372729    9012 start.go:495] detecting cgroup driver to use...
	I0408 18:57:20.385964    9012 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 18:57:20.422747    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:57:20.455789    9012 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 18:57:20.506708    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 18:57:20.541861    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:57:20.578644    9012 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 18:57:20.641182    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 18:57:20.668979    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 18:57:20.716621    9012 ssh_runner.go:195] Run: which cri-dockerd
	I0408 18:57:20.734585    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 18:57:20.751983    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 18:57:20.798385    9012 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 18:57:21.009212    9012 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 18:57:21.211789    9012 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 18:57:21.211789    9012 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 18:57:21.257255    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:21.452670    9012 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 18:57:24.049676    9012 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.5969857s)
	I0408 18:57:24.062881    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 18:57:24.102032    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:57:24.138499    9012 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 18:57:24.349941    9012 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 18:57:24.574649    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:24.773591    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 18:57:24.812527    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 18:57:24.848960    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:25.060559    9012 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 18:57:25.180808    9012 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 18:57:25.192910    9012 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 18:57:25.201786    9012 start.go:563] Will wait 60s for crictl version
	I0408 18:57:25.215756    9012 ssh_runner.go:195] Run: which crictl
	I0408 18:57:25.235084    9012 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 18:57:25.302200    9012 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 18:57:25.311559    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:57:25.358622    9012 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 18:57:25.399691    9012 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 18:57:25.402014    9012 out.go:177]   - env NO_PROXY=172.22.47.59
	I0408 18:57:25.404598    9012 out.go:177]   - env NO_PROXY=172.22.47.59,172.22.34.212
	I0408 18:57:25.406942    9012 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 18:57:25.411538    9012 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 18:57:25.417053    9012 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 18:57:25.417053    9012 ip.go:214] interface addr: 172.22.32.1/20
	I0408 18:57:25.429955    9012 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 18:57:25.437123    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:57:25.458585    9012 mustload.go:65] Loading cluster: ha-089400
	I0408 18:57:25.459419    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:57:25.459866    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:27.640199    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:27.640199    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:27.640199    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:57:27.641446    9012 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400 for IP: 172.22.44.49
	I0408 18:57:27.641532    9012 certs.go:194] generating shared ca certs ...
	I0408 18:57:27.641532    9012 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:27.642016    9012 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 18:57:27.642584    9012 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 18:57:27.643000    9012 certs.go:256] generating profile certs ...
	I0408 18:57:27.643527    9012 certs.go:359] skipping valid signed profile cert regeneration for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\client.key
	I0408 18:57:27.643614    9012 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b
	I0408 18:57:27.643614    9012 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.47.59 172.22.34.212 172.22.44.49 172.22.47.254]
	I0408 18:57:28.118513    9012 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b ...
	I0408 18:57:28.118513    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b: {Name:mkab25feccbb47b663d6b8f66287792a65330ab5 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:28.119480    9012 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b ...
	I0408 18:57:28.119480    9012 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b: {Name:mk5631061daee2ad7b71de1f1e07500b32b56953 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 18:57:28.121175    9012 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt.9408434b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt
	I0408 18:57:28.138147    9012 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key.9408434b -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key
	I0408 18:57:28.140163    9012 certs.go:359] skipping valid signed profile cert regeneration for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 18:57:28.140163    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 18:57:28.143814    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 18:57:28.144834    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 18:57:28.145512    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 18:57:28.145729    9012 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 18:57:28.145729    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 18:57:28.145729    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 18:57:28.146399    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 18:57:28.146833    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 18:57:28.147450    9012 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 18:57:28.147769    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 18:57:28.148185    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:28.148289    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 18:57:28.148631    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:30.345672    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:30.346680    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:30.346741    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:32.989097    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:57:32.989097    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:32.989747    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:57:33.088565    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.pub
	I0408 18:57:33.098689    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.pub --> memory (451 bytes)
	I0408 18:57:33.131324    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/sa.key
	I0408 18:57:33.139068    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/sa.key --> memory (1679 bytes)
	I0408 18:57:33.174910    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.crt
	I0408 18:57:33.181899    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.crt --> memory (1123 bytes)
	I0408 18:57:33.214005    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/front-proxy-ca.key
	I0408 18:57:33.220687    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/front-proxy-ca.key --> memory (1679 bytes)
	I0408 18:57:33.260034    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.crt
	I0408 18:57:33.268924    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.crt --> memory (1094 bytes)
	I0408 18:57:33.301239    9012 ssh_runner.go:195] Run: stat -c %s /var/lib/minikube/certs/etcd/ca.key
	I0408 18:57:33.307285    9012 ssh_runner.go:447] scp /var/lib/minikube/certs/etcd/ca.key --> memory (1675 bytes)
	I0408 18:57:33.327871    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 18:57:33.379676    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 18:57:33.431755    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 18:57:33.476802    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 18:57:33.525863    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1444 bytes)
	I0408 18:57:33.572048    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 18:57:33.622775    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 18:57:33.668995    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\ha-089400\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 18:57:33.716097    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 18:57:33.765127    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 18:57:33.812274    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 18:57:33.860163    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.pub (451 bytes)
	I0408 18:57:33.892180    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/sa.key (1679 bytes)
	I0408 18:57:33.922406    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.crt (1123 bytes)
	I0408 18:57:33.953869    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/front-proxy-ca.key (1679 bytes)
	I0408 18:57:33.982610    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.crt (1094 bytes)
	I0408 18:57:34.013819    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/certs/etcd/ca.key (1675 bytes)
	I0408 18:57:34.049345    9012 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (744 bytes)
	I0408 18:57:34.098860    9012 ssh_runner.go:195] Run: openssl version
	I0408 18:57:34.119302    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 18:57:34.151356    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.158638    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.169854    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 18:57:34.190202    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 18:57:34.221798    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 18:57:34.255567    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.262754    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.274318    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 18:57:34.297139    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 18:57:34.327129    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 18:57:34.364211    9012 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.372316    9012 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.384079    9012 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 18:57:34.405839    9012 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 18:57:34.436146    9012 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 18:57:34.442747    9012 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 18:57:34.442747    9012 kubeadm.go:934] updating node {m03 172.22.44.49 8443 v1.32.2 docker true true} ...
	I0408 18:57:34.443447    9012 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=ha-089400-m03 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.44.49
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:default APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 18:57:34.443447    9012 kube-vip.go:115] generating kube-vip config ...
	I0408 18:57:34.457668    9012 ssh_runner.go:195] Run: sudo sh -c "modprobe --all ip_vs ip_vs_rr ip_vs_wrr ip_vs_sh nf_conntrack"
	I0408 18:57:34.487506    9012 kube-vip.go:167] auto-enabling control-plane load-balancing in kube-vip
	I0408 18:57:34.487621    9012 kube-vip.go:137] kube-vip config:
	apiVersion: v1
	kind: Pod
	metadata:
	  creationTimestamp: null
	  name: kube-vip
	  namespace: kube-system
	spec:
	  containers:
	  - args:
	    - manager
	    env:
	    - name: vip_arp
	      value: "true"
	    - name: port
	      value: "8443"
	    - name: vip_nodename
	      valueFrom:
	        fieldRef:
	          fieldPath: spec.nodeName
	    - name: vip_interface
	      value: eth0
	    - name: vip_cidr
	      value: "32"
	    - name: dns_mode
	      value: first
	    - name: cp_enable
	      value: "true"
	    - name: cp_namespace
	      value: kube-system
	    - name: vip_leaderelection
	      value: "true"
	    - name: vip_leasename
	      value: plndr-cp-lock
	    - name: vip_leaseduration
	      value: "5"
	    - name: vip_renewdeadline
	      value: "3"
	    - name: vip_retryperiod
	      value: "1"
	    - name: address
	      value: 172.22.47.254
	    - name: prometheus_server
	      value: :2112
	    - name: lb_enable
	      value: "true"
	    - name: lb_port
	      value: "8443"
	    image: ghcr.io/kube-vip/kube-vip:v0.8.10
	    imagePullPolicy: IfNotPresent
	    name: kube-vip
	    resources: {}
	    securityContext:
	      capabilities:
	        add:
	        - NET_ADMIN
	        - NET_RAW
	    volumeMounts:
	    - mountPath: /etc/kubernetes/admin.conf
	      name: kubeconfig
	  hostAliases:
	  - hostnames:
	    - kubernetes
	    ip: 127.0.0.1
	  hostNetwork: true
	  volumes:
	  - hostPath:
	      path: "/etc/kubernetes/admin.conf"
	    name: kubeconfig
	status: {}
	I0408 18:57:34.499390    9012 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 18:57:34.520019    9012 binaries.go:47] Didn't find k8s binaries: sudo ls /var/lib/minikube/binaries/v1.32.2: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/var/lib/minikube/binaries/v1.32.2': No such file or directory
	
	Initiating transfer...
	I0408 18:57:34.531309    9012 ssh_runner.go:195] Run: sudo mkdir -p /var/lib/minikube/binaries/v1.32.2
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubeadm.sha256
	I0408 18:57:34.551707    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm -> /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubelet.sha256
	I0408 18:57:34.551707    9012 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/linux/amd64/kubectl.sha256
	I0408 18:57:34.552541    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl -> /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:57:34.566871    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm
	I0408 18:57:34.567867    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:57:34.569497    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl
	I0408 18:57:34.575644    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubeadm: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubeadm: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubeadm': No such file or directory
	I0408 18:57:34.575644    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubeadm --> /var/lib/minikube/binaries/v1.32.2/kubeadm (70942872 bytes)
	I0408 18:57:34.628587    9012 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet -> /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:57:34.628587    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubectl: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubectl: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubectl': No such file or directory
	I0408 18:57:34.628829    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubectl --> /var/lib/minikube/binaries/v1.32.2/kubectl (57323672 bytes)
	I0408 18:57:34.642503    9012 ssh_runner.go:195] Run: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet
	I0408 18:57:34.682115    9012 ssh_runner.go:352] existence check for /var/lib/minikube/binaries/v1.32.2/kubelet: stat -c "%s %y" /var/lib/minikube/binaries/v1.32.2/kubelet: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/binaries/v1.32.2/kubelet': No such file or directory
	I0408 18:57:34.682227    9012 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\linux\amd64\v1.32.2/kubelet --> /var/lib/minikube/binaries/v1.32.2/kubelet (77406468 bytes)
	I0408 18:57:36.002421    9012 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /etc/kubernetes/manifests
	I0408 18:57:36.021630    9012 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (312 bytes)
	I0408 18:57:36.054894    9012 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 18:57:36.089926    9012 ssh_runner.go:362] scp memory --> /etc/kubernetes/manifests/kube-vip.yaml (1441 bytes)
	I0408 18:57:36.137467    9012 ssh_runner.go:195] Run: grep 172.22.47.254	control-plane.minikube.internal$ /etc/hosts
	I0408 18:57:36.144566    9012 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.47.254	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 18:57:36.179081    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:57:36.398442    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:57:36.435035    9012 host.go:66] Checking if "ha-089400" exists ...
	I0408 18:57:36.435952    9012 start.go:317] joinCluster: &{Name:ha-089400 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:ha-089400 Namespace:def
ault APIServerHAVIP:172.22.47.254 APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.47.59 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP:172.22.34.212 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:false efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget
:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:false storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOpti
mizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 18:57:36.435952    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0"
	I0408 18:57:36.435952    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM ha-089400 ).state
	I0408 18:57:38.631812    9012 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 18:57:38.631812    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:38.632906    9012 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM ha-089400 ).networkadapters[0]).ipaddresses[0]
	I0408 18:57:41.313133    9012 main.go:141] libmachine: [stdout =====>] : 172.22.47.59
	
	I0408 18:57:41.313308    9012 main.go:141] libmachine: [stderr =====>] : 
	I0408 18:57:41.313390    9012 sshutil.go:53] new ssh client: &{IP:172.22.47.59 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\ha-089400\id_rsa Username:docker}
	I0408 18:57:41.532765    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm token create --print-join-command --ttl=0": (5.0967729s)
	I0408 18:57:41.533942    9012 start.go:343] trying to join control-plane node "m03" to cluster: &{Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:57:41.533942    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 3txpsc.ry8va8iswsjgjcej --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m03 --control-plane --apiserver-advertise-address=172.22.44.49 --apiserver-bind-port=8443"
	I0408 18:58:24.723444    9012 ssh_runner.go:235] Completed: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm join control-plane.minikube.internal:8443 --token 3txpsc.ry8va8iswsjgjcej --discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 --ignore-preflight-errors=all --cri-socket unix:///var/run/cri-dockerd.sock --node-name=ha-089400-m03 --control-plane --apiserver-advertise-address=172.22.44.49 --apiserver-bind-port=8443": (43.1891609s)
	I0408 18:58:24.723587    9012 ssh_runner.go:195] Run: /bin/bash -c "sudo systemctl daemon-reload && sudo systemctl enable kubelet && sudo systemctl start kubelet"
	I0408 18:58:25.714279    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes ha-089400-m03 minikube.k8s.io/updated_at=2025_04_08T18_58_25_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=ha-089400 minikube.k8s.io/primary=false
	I0408 18:58:25.892117    9012 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig taint nodes ha-089400-m03 node-role.kubernetes.io/control-plane:NoSchedule-
	I0408 18:58:26.059827    9012 start.go:319] duration metric: took 49.6234835s to joinCluster
	I0408 18:58:26.059827    9012 start.go:235] Will wait 6m0s for node &{Name:m03 IP:172.22.44.49 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 18:58:26.060813    9012 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:58:26.063824    9012 out.go:177] * Verifying Kubernetes components...
	I0408 18:58:26.078811    9012 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 18:58:26.502070    9012 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 18:58:26.555832    9012 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:58:26.555832    9012 kapi.go:59] client config for ha-089400: &rest.Config{Host:"https://172.22.47.254:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\ha-089400\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAData:[]uint8(nil), Nex
tProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	W0408 18:58:26.555832    9012 kubeadm.go:483] Overriding stale ClientConfig host https://172.22.47.254:8443 with https://172.22.47.59:8443
	I0408 18:58:26.556916    9012 node_ready.go:35] waiting up to 6m0s for node "ha-089400-m03" to be "Ready" ...
	I0408 18:58:26.556916    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:26.556916    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:26.556916    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:26.556916    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:26.574567    9012 round_trippers.go:581] Response Status: 200 OK in 17 milliseconds
	I0408 18:58:27.057716    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:27.057716    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:27.057716    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:27.057716    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:27.066088    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:27.558007    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:27.558074    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:27.558074    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:27.558074    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:27.564660    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:28.057118    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:28.057118    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:28.057118    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:28.057118    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:28.068170    9012 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 18:58:28.557898    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:28.557898    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:28.557898    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:28.557898    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:28.563830    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:28.563830    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:29.057369    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:29.057369    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:29.057369    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:29.057369    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:29.065477    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:29.557993    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:29.557993    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:29.557993    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:29.558109    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:29.577427    9012 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 18:58:30.057653    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:30.057653    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:30.057653    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:30.057653    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:30.063904    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:30.557846    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:30.557870    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:30.557870    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:30.557870    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:30.565528    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:30.566223    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:31.057701    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:31.057701    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:31.057701    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:31.057701    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:31.166220    9012 round_trippers.go:581] Response Status: 200 OK in 108 milliseconds
	I0408 18:58:31.557420    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:31.557420    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:31.557420    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:31.557420    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:31.563677    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:32.057182    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:32.057182    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:32.057182    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:32.057182    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:32.067623    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:32.557428    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:32.557428    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:32.557428    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:32.557428    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:32.562869    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:33.057240    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:33.057240    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:33.057240    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:33.057240    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:33.063773    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:33.063921    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:33.557648    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:33.557648    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:33.557648    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:33.557648    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:33.618078    9012 round_trippers.go:581] Response Status: 200 OK in 60 milliseconds
	I0408 18:58:34.057142    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:34.057142    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:34.057142    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:34.057142    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:34.068528    9012 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 18:58:34.557191    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:34.557191    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:34.557191    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:34.557191    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:34.569955    9012 round_trippers.go:581] Response Status: 200 OK in 12 milliseconds
	I0408 18:58:35.057623    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:35.057623    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:35.057623    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:35.057623    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:35.064281    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:35.064786    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:35.557087    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:35.557087    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:35.557087    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:35.557087    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:35.565250    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:36.057772    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:36.057772    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:36.057772    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:36.057772    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:36.063816    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:36.558033    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:36.558033    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:36.558033    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:36.558033    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:36.563695    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.057420    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:37.057420    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:37.057900    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:37.057900    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:37.063183    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.557506    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:37.557506    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:37.557506    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:37.557506    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:37.562983    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:37.563546    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:38.057305    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:38.057305    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:38.057305    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:38.057305    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:38.063559    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:38.557964    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:38.557964    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:38.557964    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:38.558055    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:38.566553    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:39.057835    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:39.057925    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:39.057925    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:39.057925    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:39.063365    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:39.557139    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:39.557139    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:39.557139    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:39.557139    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:39.563297    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:40.057668    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:40.057668    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:40.058112    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:40.058112    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:40.063774    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:40.063808    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:40.557027    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:40.557027    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:40.557027    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:40.557027    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:40.561855    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:41.057228    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:41.057228    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:41.057228    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:41.057228    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:41.065918    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:41.558287    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:41.558287    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:41.558287    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:41.558377    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:41.563341    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:42.057440    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:42.057440    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:42.057440    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:42.057440    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:42.065554    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:42.066588    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:42.558304    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:42.558401    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:42.558401    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:42.558401    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:42.564688    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:43.057225    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:43.057225    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:43.057225    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:43.057225    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:43.063203    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:43.558245    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:43.558336    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:43.558336    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:43.558336    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:43.563347    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:44.057407    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:44.057407    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:44.057407    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:44.057407    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:44.063500    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:44.557613    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:44.557613    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:44.557613    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:44.557613    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:44.564906    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:44.566019    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:45.057289    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:45.057289    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:45.057289    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:45.057289    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:45.061835    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:45.558244    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:45.558311    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:45.558373    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:45.558373    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:45.562822    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:46.057400    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:46.057400    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:46.057400    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:46.057400    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:46.066945    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:46.558118    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:46.558118    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:46.558118    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:46.558118    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:46.564500    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:47.057403    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:47.057403    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:47.057403    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:47.057403    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:47.063190    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:47.063734    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:47.557393    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:47.557393    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:47.557393    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:47.557393    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:47.563095    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:48.058596    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:48.058739    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:48.058739    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:48.058739    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:48.064140    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:48.557624    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:48.557624    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:48.557624    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:48.557624    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:48.563920    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:49.058063    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:49.058178    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:49.058178    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:49.058178    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:49.063346    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:49.557756    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:49.557756    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:49.557756    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:49.557756    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:49.567498    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:49.568031    9012 node_ready.go:53] node "ha-089400-m03" has status "Ready":"False"
	I0408 18:58:50.058138    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.058138    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.058138    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.058138    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.064332    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:50.557611    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.557611    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.557611    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.557611    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.565791    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:50.566051    9012 node_ready.go:49] node "ha-089400-m03" has status "Ready":"True"
	I0408 18:58:50.566199    9012 node_ready.go:38] duration metric: took 24.0090946s for node "ha-089400-m03" to be "Ready" ...
	I0408 18:58:50.566263    9012 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:58:50.566440    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:50.566471    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.566471    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.566471    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.574664    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:50.576627    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.576627    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7f57d
	I0408 18:58:50.576627    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.576627    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.576627    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.580609    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:58:50.581595    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.581595    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.581595    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.581595    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.585597    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.586595    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.586595    9012 pod_ready.go:82] duration metric: took 9.968ms for pod "coredns-668d6bf9bc-7f57d" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.586595    9012 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.586595    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-7v5zn
	I0408 18:58:50.586595    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.586595    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.586595    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.590699    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.591085    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.591085    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.591148    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.591148    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.594918    9012 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 18:58:50.595148    9012 pod_ready.go:93] pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.595148    9012 pod_ready.go:82] duration metric: took 8.5525ms for pod "coredns-668d6bf9bc-7v5zn" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.595148    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.595264    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400
	I0408 18:58:50.595264    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.595341    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.595341    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.599719    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.600022    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:50.600022    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.600022    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.600022    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.602649    9012 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 18:58:50.603743    9012 pod_ready.go:93] pod "etcd-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.603743    9012 pod_ready.go:82] duration metric: took 8.5949ms for pod "etcd-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.603743    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.603743    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m02
	I0408 18:58:50.603743    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.603743    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.603743    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.606495    9012 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 18:58:50.608222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:50.608284    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.608284    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.608284    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.612553    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:50.612553    9012 pod_ready.go:93] pod "etcd-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.612553    9012 pod_ready.go:82] duration metric: took 8.8103ms for pod "etcd-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.612553    9012 pod_ready.go:79] waiting up to 6m0s for pod "etcd-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.758816    9012 request.go:661] Waited for 146.2615ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m03
	I0408 18:58:50.759305    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/etcd-ha-089400-m03
	I0408 18:58:50.759305    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.759305    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.759305    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.765654    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:50.957667    9012 request.go:661] Waited for 191.5067ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.957667    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:50.957667    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:50.957667    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:50.957667    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:50.963746    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:50.964235    9012 pod_ready.go:93] pod "etcd-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:50.964281    9012 pod_ready.go:82] duration metric: took 351.7248ms for pod "etcd-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:50.964314    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.158748    9012 request.go:661] Waited for 194.3919ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:58:51.158748    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400
	I0408 18:58:51.158748    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.159157    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.159157    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.164904    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.358234    9012 request.go:661] Waited for 192.4936ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:51.358234    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:51.358658    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.358739    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.358739    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.363836    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.363836    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:51.363836    9012 pod_ready.go:82] duration metric: took 399.5187ms for pod "kube-apiserver-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.363836    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.558066    9012 request.go:661] Waited for 194.2287ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:58:51.558066    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m02
	I0408 18:58:51.558066    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.558066    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.558066    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.563265    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:51.757692    9012 request.go:661] Waited for 193.9221ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:51.757692    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:51.757692    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.757692    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.757692    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.764468    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:51.765070    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:51.765125    9012 pod_ready.go:82] duration metric: took 401.2857ms for pod "kube-apiserver-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.765125    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:51.958012    9012 request.go:661] Waited for 192.7415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m03
	I0408 18:58:51.958012    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-ha-089400-m03
	I0408 18:58:51.958012    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:51.958012    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:51.958012    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:51.964420    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:52.159050    9012 request.go:661] Waited for 194.6286ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:52.159050    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:52.159438    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.159438    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.159438    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.166788    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:52.167159    9012 pod_ready.go:93] pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.167159    9012 pod_ready.go:82] duration metric: took 402.0314ms for pod "kube-apiserver-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.167159    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.358361    9012 request.go:661] Waited for 191.2004ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:58:52.358361    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400
	I0408 18:58:52.358361    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.358361    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.358361    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.365052    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:52.558676    9012 request.go:661] Waited for 193.1294ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:52.559187    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:52.559187    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.559187    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.559187    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.565001    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:52.565896    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.565896    9012 pod_ready.go:82] duration metric: took 398.7335ms for pod "kube-controller-manager-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.565896    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.758504    9012 request.go:661] Waited for 192.4303ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:58:52.759025    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m02
	I0408 18:58:52.759025    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.759025    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.759025    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.764070    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:52.958887    9012 request.go:661] Waited for 193.4702ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:52.958887    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:52.958887    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:52.958887    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:52.958887    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:52.969210    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:52.970659    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:52.970710    9012 pod_ready.go:82] duration metric: took 404.8107ms for pod "kube-controller-manager-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:52.970710    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.158477    9012 request.go:661] Waited for 187.6617ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m03
	I0408 18:58:53.158477    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-ha-089400-m03
	I0408 18:58:53.158477    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.158477    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.158477    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.163516    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:53.357751    9012 request.go:661] Waited for 193.07ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:53.357751    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:53.358342    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.358396    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.358396    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.364143    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:53.364674    9012 pod_ready.go:93] pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:53.364674    9012 pod_ready.go:82] duration metric: took 393.9615ms for pod "kube-controller-manager-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.364674    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.558357    9012 request.go:661] Waited for 193.5726ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:58:53.558859    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-c4hjd
	I0408 18:58:53.558859    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.558859    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.558859    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.563766    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:53.758429    9012 request.go:661] Waited for 193.9984ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:53.758429    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:53.758897    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.758897    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.758897    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.765902    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:53.766344    9012 pod_ready.go:93] pod "kube-proxy-c4hjd" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:53.766344    9012 pod_ready.go:82] duration metric: took 401.6663ms for pod "kube-proxy-c4hjd" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.766526    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-cqx6b" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:53.957910    9012 request.go:661] Waited for 191.3552ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-cqx6b
	I0408 18:58:53.958458    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-cqx6b
	I0408 18:58:53.958458    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:53.958458    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:53.958458    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:53.963298    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:54.158329    9012 request.go:661] Waited for 194.1424ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:54.158329    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:54.158329    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.158329    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.158329    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.167135    9012 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 18:58:54.167576    9012 pod_ready.go:93] pod "kube-proxy-cqx6b" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.167666    9012 pod_ready.go:82] duration metric: took 401.1373ms for pod "kube-proxy-cqx6b" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.167666    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.357803    9012 request.go:661] Waited for 189.9475ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:58:54.357803    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-proxy-gf6wz
	I0408 18:58:54.357803    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.357803    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.357803    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.363188    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:54.558763    9012 request.go:661] Waited for 194.5535ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.558763    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.558763    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.558763    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.558763    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.569898    9012 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 18:58:54.570238    9012 pod_ready.go:93] pod "kube-proxy-gf6wz" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.570329    9012 pod_ready.go:82] duration metric: took 402.6599ms for pod "kube-proxy-gf6wz" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.570329    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.757885    9012 request.go:661] Waited for 187.4496ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:58:54.757885    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400
	I0408 18:58:54.757885    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.757885    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.757885    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.764202    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:54.958088    9012 request.go:661] Waited for 193.4099ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.958088    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400
	I0408 18:58:54.958088    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:54.958088    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:54.958088    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:54.963589    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:54.963960    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:54.964018    9012 pod_ready.go:82] duration metric: took 393.6855ms for pod "kube-scheduler-ha-089400" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:54.964076    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.158025    9012 request.go:661] Waited for 193.8566ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:58:55.158025    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m02
	I0408 18:58:55.158025    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.158025    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.158025    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.164478    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:55.358816    9012 request.go:661] Waited for 193.8344ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:55.359799    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m02
	I0408 18:58:55.359976    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.360053    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.360106    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.365072    9012 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 18:58:55.365523    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:55.365523    9012 pod_ready.go:82] duration metric: took 401.4445ms for pod "kube-scheduler-ha-089400-m02" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.365624    9012 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.558315    9012 request.go:661] Waited for 192.6887ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m03
	I0408 18:58:55.558792    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-ha-089400-m03
	I0408 18:58:55.558858    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.558858    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.558858    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.565082    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:55.758505    9012 request.go:661] Waited for 192.7752ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:55.758505    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes/ha-089400-m03
	I0408 18:58:55.758505    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.758505    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.758505    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.766503    9012 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 18:58:55.766916    9012 pod_ready.go:93] pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace has status "Ready":"True"
	I0408 18:58:55.766976    9012 pod_ready.go:82] duration metric: took 401.3479ms for pod "kube-scheduler-ha-089400-m03" in "kube-system" namespace to be "Ready" ...
	I0408 18:58:55.766976    9012 pod_ready.go:39] duration metric: took 5.2006432s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 18:58:55.767035    9012 api_server.go:52] waiting for apiserver process to appear ...
	I0408 18:58:55.777864    9012 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 18:58:55.803319    9012 api_server.go:72] duration metric: took 29.7432227s to wait for apiserver process to appear ...
	I0408 18:58:55.803344    9012 api_server.go:88] waiting for apiserver healthz status ...
	I0408 18:58:55.803402    9012 api_server.go:253] Checking apiserver healthz at https://172.22.47.59:8443/healthz ...
	I0408 18:58:55.814276    9012 api_server.go:279] https://172.22.47.59:8443/healthz returned 200:
	ok
	I0408 18:58:55.814471    9012 round_trippers.go:470] GET https://172.22.47.59:8443/version
	I0408 18:58:55.814471    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.814471    9012 round_trippers.go:480]     Accept: application/json, */*
	I0408 18:58:55.814471    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.815814    9012 round_trippers.go:581] Response Status: 200 OK in 1 milliseconds
	I0408 18:58:55.815907    9012 api_server.go:141] control plane version: v1.32.2
	I0408 18:58:55.815907    9012 api_server.go:131] duration metric: took 12.5631ms to wait for apiserver health ...
	I0408 18:58:55.815907    9012 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 18:58:55.958767    9012 request.go:661] Waited for 142.8582ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:55.958767    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:55.958767    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:55.958767    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:55.958767    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:55.968028    9012 round_trippers.go:581] Response Status: 200 OK in 9 milliseconds
	I0408 18:58:55.970434    9012 system_pods.go:59] 24 kube-system pods found
	I0408 18:58:55.970988    9012 system_pods.go:61] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "etcd-ha-089400-m03" [7a2a97ec-01c9-422a-bfd1-26e763146ff8] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-jtrnl" [e9d6e73b-0bca-4d4c-a306-488d28a5ac38] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-apiserver-ha-089400-m03" [3dd306b1-11cd-4e5b-acc8-9cd1fe25236c] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-controller-manager-ha-089400-m03" [6f33fa37-94f7-4dda-9a49-c128d0f8555f] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-cqx6b" [f17cfde2-a18e-418f-922f-8d36b3c2b976] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-scheduler-ha-089400-m03" [f6ffda8d-6120-48b0-a2ef-0a3212d83fd4] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "kube-vip-ha-089400-m03" [4d6da5dc-4ccd-4c21-8a42-7373be544476] Running
	I0408 18:58:55.970988    9012 system_pods.go:61] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:58:55.970988    9012 system_pods.go:74] duration metric: took 155.0796ms to wait for pod list to return data ...
	I0408 18:58:55.970988    9012 default_sa.go:34] waiting for default service account to be created ...
	I0408 18:58:56.158445    9012 request.go:661] Waited for 187.4549ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:58:56.158793    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/default/serviceaccounts
	I0408 18:58:56.158793    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.158793    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.158793    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.164766    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:56.164766    9012 default_sa.go:45] found service account: "default"
	I0408 18:58:56.164766    9012 default_sa.go:55] duration metric: took 193.7763ms for default service account to be created ...
	I0408 18:58:56.164766    9012 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 18:58:56.358222    9012 request.go:661] Waited for 193.4539ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:56.358222    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/namespaces/kube-system/pods
	I0408 18:58:56.358222    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.358222    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.358222    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.363606    9012 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 18:58:56.366152    9012 system_pods.go:86] 24 kube-system pods found
	I0408 18:58:56.366152    9012 system_pods.go:89] "coredns-668d6bf9bc-7f57d" [03d011a2-5411-4a42-b0c4-cbcd308c0106] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "coredns-668d6bf9bc-7v5zn" [2e722ffb-b2ba-4cce-8ad8-4cea887eb532] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "etcd-ha-089400" [c78798a4-4b49-48d8-a0a6-4503aaae277b] Running
	I0408 18:58:56.366152    9012 system_pods.go:89] "etcd-ha-089400-m02" [e1c8cb62-d242-42ab-83e6-d10380c7a137] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "etcd-ha-089400-m03" [7a2a97ec-01c9-422a-bfd1-26e763146ff8] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-jtrnl" [e9d6e73b-0bca-4d4c-a306-488d28a5ac38] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-r2jmm" [21185057-5192-426b-a5c0-28aa889dd3f4] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kindnet-tsg62" [0ff8dbcd-506b-45ab-8ba3-db2656349e7b] Running
	I0408 18:58:56.366370    9012 system_pods.go:89] "kube-apiserver-ha-089400" [ec320438-3f31-4e51-b467-37b8178dd37c] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-apiserver-ha-089400-m02" [04965b8a-d16e-4820-a8f8-e033e559c5a3] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-apiserver-ha-089400-m03" [3dd306b1-11cd-4e5b-acc8-9cd1fe25236c] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400" [756a09c0-dce5-42e6-8c19-608f37c81d94] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m02" [5356d1cd-cc15-4957-919c-67e8d6998204] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-controller-manager-ha-089400-m03" [6f33fa37-94f7-4dda-9a49-c128d0f8555f] Running
	I0408 18:58:56.366583    9012 system_pods.go:89] "kube-proxy-c4hjd" [f0acb529-4fa0-4af4-9a6f-1e3a608b85c3] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-proxy-cqx6b" [f17cfde2-a18e-418f-922f-8d36b3c2b976] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-proxy-gf6wz" [fdcdeaf7-4ac5-4f03-a079-9e15867653c2] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400" [0a3f7e31-3b6b-4959-8af9-c094019f75ec] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400-m02" [f6fee91b-5458-42d7-9d7d-5789c58e6e3a] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-scheduler-ha-089400-m03" [f6ffda8d-6120-48b0-a2ef-0a3212d83fd4] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400" [8c408631-92b5-45d7-9212-909e0871c699] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400-m02" [050aa623-b4d8-4069-bd0e-6297fbba07f0] Running
	I0408 18:58:56.366658    9012 system_pods.go:89] "kube-vip-ha-089400-m03" [4d6da5dc-4ccd-4c21-8a42-7373be544476] Running
	I0408 18:58:56.366737    9012 system_pods.go:89] "storage-provisioner" [d7abc993-16df-449c-baf4-7b53a08a2d10] Running
	I0408 18:58:56.366737    9012 system_pods.go:126] duration metric: took 201.9694ms to wait for k8s-apps to be running ...
	I0408 18:58:56.366737    9012 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 18:58:56.377875    9012 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 18:58:56.405628    9012 system_svc.go:56] duration metric: took 38.8906ms WaitForService to wait for kubelet
	I0408 18:58:56.405694    9012 kubeadm.go:582] duration metric: took 30.3456284s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 18:58:56.405756    9012 node_conditions.go:102] verifying NodePressure condition ...
	I0408 18:58:56.557669    9012 request.go:661] Waited for 151.8063ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.47.59:8443/api/v1/nodes
	I0408 18:58:56.557669    9012 round_trippers.go:470] GET https://172.22.47.59:8443/api/v1/nodes
	I0408 18:58:56.557669    9012 round_trippers.go:476] Request Headers:
	I0408 18:58:56.557669    9012 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 18:58:56.557669    9012 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 18:58:56.563976    9012 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 18:58:56.564760    9012 node_conditions.go:123] node cpu capacity is 2
	I0408 18:58:56.564760    9012 node_conditions.go:105] duration metric: took 159.0026ms to run NodePressure ...
	I0408 18:58:56.564760    9012 start.go:241] waiting for startup goroutines ...
	I0408 18:58:56.564760    9012 start.go:255] writing updated cluster config ...
	I0408 18:58:56.578721    9012 ssh_runner.go:195] Run: rm -f paused
	I0408 18:58:56.727889    9012 start.go:600] kubectl: 1.32.3, cluster: 1.32.2 (minor skew: 0)
	I0408 18:58:56.733188    9012 out.go:177] * Done! kubectl is now configured to use "ha-089400" cluster and "default" namespace by default
	
	
	==> Docker <==
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/b25d2f72ffcd40fc1387d63ac907f17f915bef81b8cb3473beaacbc88f842657/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/e32d687b8d5424c201e5c86be4f7e87c86e799fad9931fd10ac8786152ae94a8/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:05 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:51:05Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/34766421d15c3e00899ddcc49479c11696dc0e4a373a189c5cbafca2ad9bef91/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.037244286Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.038353572Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.038532370Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.039418559Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398095320Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398223718Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398305317Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.398737112Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428088257Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428315254Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.428407353Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:51:06 ha-089400 dockerd[1447]: time="2025-04-08T18:51:06.429979834Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904122069Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904346270Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.904370770Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:35 ha-089400 dockerd[1447]: time="2025-04-08T18:59:35.905500575Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:36 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:59:36Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/a007107dd26d0a9a88a391781fffcf20b8829b0e935e8a9f8625b68e633045aa/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 18:59:37 ha-089400 cri-dockerd[1340]: time="2025-04-08T18:59:37Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.161294936Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.162745343Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.162938144Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 18:59:38 ha-089400 dockerd[1447]: time="2025-04-08T18:59:38.163240945Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	e05ddb65240a1       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   18 minutes ago      Running             busybox                   0                   a007107dd26d0       busybox-58667487b6-d76nt
	24b3936e36154       c69fa2e9cbf5f                                                                                         26 minutes ago      Running             coredns                   0                   e32d687b8d542       coredns-668d6bf9bc-7f57d
	991a5e9234386       c69fa2e9cbf5f                                                                                         26 minutes ago      Running             coredns                   0                   34766421d15c3       coredns-668d6bf9bc-7v5zn
	ad2a3890b583d       6e38f40d628db                                                                                         26 minutes ago      Running             storage-provisioner       0                   b25d2f72ffcd4       storage-provisioner
	ea0911610b904       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              27 minutes ago      Running             kindnet-cni               0                   eb19661019a60       kindnet-tsg62
	1fa28a71c2bd6       f1332858868e1                                                                                         27 minutes ago      Running             kube-proxy                0                   2523814f4415a       kube-proxy-gf6wz
	b1828f8a97fe6       ghcr.io/kube-vip/kube-vip@sha256:e01c90bcdd3eb37a46aaf04f6c86cca3e66dd0db7a231f3c8e8aa105635c158a     27 minutes ago      Running             kube-vip                  0                   b65dfebdf40c5       kube-vip-ha-089400
	f13d6fc3e9492       85b7a174738ba                                                                                         27 minutes ago      Running             kube-apiserver            0                   6040dca8f6bf8       kube-apiserver-ha-089400
	6969b47aa676b       d8e673e7c9983                                                                                         27 minutes ago      Running             kube-scheduler            0                   e045296fcc947       kube-scheduler-ha-089400
	64cc2cd29fdc7       b6a454c5a800d                                                                                         27 minutes ago      Running             kube-controller-manager   0                   25a3cf2e261a7       kube-controller-manager-ha-089400
	dd0000ba2b8fc       a9e7e6b294baf                                                                                         27 minutes ago      Running             etcd                      0                   82a6814d9840b       etcd-ha-089400
	
	
	==> coredns [24b3936e3615] <==
	[INFO] 10.244.0.4:46731 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000197401s
	[INFO] 10.244.1.2:57130 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000228401s
	[INFO] 10.244.1.2:51995 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000149101s
	[INFO] 10.244.1.2:53167 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000151401s
	[INFO] 10.244.2.2:60530 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000172901s
	[INFO] 10.244.2.2:55628 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000147001s
	[INFO] 10.244.2.2:46073 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000154901s
	[INFO] 10.244.2.2:49601 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,aa,rd,ra 111 0.0000672s
	[INFO] 10.244.2.2:48545 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000096601s
	[INFO] 10.244.0.4:56197 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000190201s
	[INFO] 10.244.0.4:40945 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000384702s
	[INFO] 10.244.1.2:42589 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000340401s
	[INFO] 10.244.1.2:33657 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000157001s
	[INFO] 10.244.1.2:43921 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0001399s
	[INFO] 10.244.2.2:54742 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000284102s
	[INFO] 10.244.2.2:51046 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000296302s
	[INFO] 10.244.2.2:51819 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0001481s
	[INFO] 10.244.0.4:39976 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.0001915s
	[INFO] 10.244.0.4:33463 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000206901s
	[INFO] 10.244.0.4:47150 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0001754s
	[INFO] 10.244.1.2:37200 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000286201s
	[INFO] 10.244.1.2:33497 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000293201s
	[INFO] 10.244.1.2:37779 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0000742s
	[INFO] 10.244.2.2:42787 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000173401s
	[INFO] 10.244.2.2:52825 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.0000747s
	
	
	==> coredns [991a5e923438] <==
	[INFO] 10.244.1.2:39417 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd 60 0.000081801s
	[INFO] 10.244.1.2:36458 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.059506783s
	[INFO] 10.244.2.2:38667 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.001155705s
	[INFO] 10.244.2.2:36314 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,aa,rd 60 0.000176901s
	[INFO] 10.244.0.4:59697 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000229502s
	[INFO] 10.244.0.4:41143 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000245502s
	[INFO] 10.244.0.4:60072 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000385602s
	[INFO] 10.244.0.4:53645 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000273902s
	[INFO] 10.244.0.4:40792 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000203801s
	[INFO] 10.244.1.2:49773 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.117505829s
	[INFO] 10.244.1.2:39680 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000262202s
	[INFO] 10.244.1.2:39040 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000128901s
	[INFO] 10.244.1.2:50379 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.003319015s
	[INFO] 10.244.1.2:47430 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000232401s
	[INFO] 10.244.2.2:56517 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,aa,rd,ra 111 0.000378602s
	[INFO] 10.244.2.2:38313 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000208201s
	[INFO] 10.244.2.2:37461 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.0000729s
	[INFO] 10.244.0.4:36721 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000271402s
	[INFO] 10.244.0.4:42158 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000168801s
	[INFO] 10.244.1.2:41784 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000258901s
	[INFO] 10.244.2.2:46777 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000198901s
	[INFO] 10.244.0.4:34099 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000235401s
	[INFO] 10.244.1.2:36014 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.0001672s
	[INFO] 10.244.2.2:54303 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.0001955s
	[INFO] 10.244.2.2:60514 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.000224901s
	
	
	==> describe nodes <==
	Name:               ha-089400
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T18_50_37_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:50:32 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:17:39 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:13:23 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:13:23 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:13:23 +0000   Tue, 08 Apr 2025 18:50:29 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:13:23 +0000   Tue, 08 Apr 2025 18:51:04 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.47.59
	  Hostname:    ha-089400
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 b5f60f03694b46f6bab79c049c06cf8e
	  System UUID:                0ea6b7ae-aa8c-764b-840e-6fae09375fe1
	  Boot ID:                    44cd1066-73d1-4beb-9f2d-5ae36e102363
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (11 in total)
	  Namespace                   Name                                 CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                 ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-d76nt             0 (0%)        0 (0%)      0 (0%)           0 (0%)         18m
	  kube-system                 coredns-668d6bf9bc-7f57d             100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     27m
	  kube-system                 coredns-668d6bf9bc-7v5zn             100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     27m
	  kube-system                 etcd-ha-089400                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         27m
	  kube-system                 kindnet-tsg62                        100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      27m
	  kube-system                 kube-apiserver-ha-089400             250m (12%)    0 (0%)      0 (0%)           0 (0%)         27m
	  kube-system                 kube-controller-manager-ha-089400    200m (10%)    0 (0%)      0 (0%)           0 (0%)         27m
	  kube-system                 kube-proxy-gf6wz                     0 (0%)        0 (0%)      0 (0%)           0 (0%)         27m
	  kube-system                 kube-scheduler-ha-089400             100m (5%)     0 (0%)      0 (0%)           0 (0%)         27m
	  kube-system                 kube-vip-ha-089400                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         27m
	  kube-system                 storage-provisioner                  0 (0%)        0 (0%)      0 (0%)           0 (0%)         27m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                950m (47%)   100m (5%)
	  memory             290Mi (13%)  390Mi (18%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 27m                kube-proxy       
	  Normal  NodeHasSufficientPID     27m (x7 over 27m)  kubelet          Node ha-089400 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  27m                kubelet          Updated Node Allocatable limit across pods
	  Normal  Starting                 27m                kubelet          Starting kubelet.
	  Normal  NodeHasSufficientMemory  27m (x8 over 27m)  kubelet          Node ha-089400 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    27m (x8 over 27m)  kubelet          Node ha-089400 status is now: NodeHasNoDiskPressure
	  Normal  Starting                 27m                kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  27m                kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  27m                kubelet          Node ha-089400 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    27m                kubelet          Node ha-089400 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     27m                kubelet          Node ha-089400 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           27m                node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	  Normal  NodeReady                26m                kubelet          Node ha-089400 status is now: NodeReady
	  Normal  RegisteredNode           23m                node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	  Normal  RegisteredNode           19m                node-controller  Node ha-089400 event: Registered Node ha-089400 in Controller
	
	
	Name:               ha-089400-m02
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400-m02
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T18_54_23_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:54:18 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400-m02
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:17:38 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:15:33 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:15:33 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:15:33 +0000   Tue, 08 Apr 2025 18:54:18 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:15:33 +0000   Tue, 08 Apr 2025 18:54:45 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.34.212
	  Hostname:    ha-089400-m02
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 2f82861eb8e545dea9c55a9a05a7b7ba
	  System UUID:                2bd9c8ec-208e-1240-8d27-9f0c6ed89271
	  Boot ID:                    6ffadba8-87d2-406f-a63d-6b7871dd2907
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-lwn24                 0 (0%)        0 (0%)      0 (0%)           0 (0%)         18m
	  kube-system                 etcd-ha-089400-m02                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         23m
	  kube-system                 kindnet-r2jmm                            100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      23m
	  kube-system                 kube-apiserver-ha-089400-m02             250m (12%)    0 (0%)      0 (0%)           0 (0%)         23m
	  kube-system                 kube-controller-manager-ha-089400-m02    200m (10%)    0 (0%)      0 (0%)           0 (0%)         23m
	  kube-system                 kube-proxy-c4hjd                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         23m
	  kube-system                 kube-scheduler-ha-089400-m02             100m (5%)     0 (0%)      0 (0%)           0 (0%)         23m
	  kube-system                 kube-vip-ha-089400-m02                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         23m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (7%)  50Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 23m                kube-proxy       
	  Normal  NodeHasSufficientMemory  23m (x8 over 23m)  kubelet          Node ha-089400-m02 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    23m (x8 over 23m)  kubelet          Node ha-089400-m02 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     23m (x7 over 23m)  kubelet          Node ha-089400-m02 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           23m                node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	  Normal  RegisteredNode           23m                node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	  Normal  RegisteredNode           19m                node-controller  Node ha-089400-m02 event: Registered Node ha-089400-m02 in Controller
	
	
	Name:               ha-089400-m03
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T18_58_25_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 18:58:17 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400-m03
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:17:40 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:14:47 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:14:47 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:14:47 +0000   Tue, 08 Apr 2025 18:58:17 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:14:47 +0000   Tue, 08 Apr 2025 18:58:50 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.44.49
	  Hostname:    ha-089400-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 3d90b36bb2a74c3982ac76d0daa17ddf
	  System UUID:                59a21209-43a0-d24e-95d9-24be2f7a265f
	  Boot ID:                    684a319b-b7a5-481c-957e-da534b8a608a
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.2.0/24
	PodCIDRs:                     10.244.2.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                     CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                     ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-snc97                 0 (0%)        0 (0%)      0 (0%)           0 (0%)         18m
	  kube-system                 etcd-ha-089400-m03                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         19m
	  kube-system                 kindnet-jtrnl                            100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      19m
	  kube-system                 kube-apiserver-ha-089400-m03             250m (12%)    0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 kube-controller-manager-ha-089400-m03    200m (10%)    0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 kube-proxy-cqx6b                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 kube-scheduler-ha-089400-m03             100m (5%)     0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 kube-vip-ha-089400-m03                   0 (0%)        0 (0%)      0 (0%)           0 (0%)         19m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests    Limits
	  --------           --------    ------
	  cpu                750m (37%)  100m (5%)
	  memory             150Mi (7%)  50Mi (2%)
	  ephemeral-storage  0 (0%)      0 (0%)
	  hugepages-2Mi      0 (0%)      0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 19m                kube-proxy       
	  Normal  NodeHasSufficientMemory  19m (x8 over 19m)  kubelet          Node ha-089400-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    19m (x8 over 19m)  kubelet          Node ha-089400-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     19m (x7 over 19m)  kubelet          Node ha-089400-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  19m                kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           19m                node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	  Normal  RegisteredNode           19m                node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	  Normal  RegisteredNode           19m                node-controller  Node ha-089400-m03 event: Registered Node ha-089400-m03 in Controller
	
	
	Name:               ha-089400-m04
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=ha-089400-m04
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=ha-089400
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T19_04_03_0700
	                    minikube.k8s.io/version=v1.35.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:04:02 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  ha-089400-m04
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:17:40 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:14:22 +0000   Tue, 08 Apr 2025 19:04:02 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:14:22 +0000   Tue, 08 Apr 2025 19:04:02 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:14:22 +0000   Tue, 08 Apr 2025 19:04:02 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:14:22 +0000   Tue, 08 Apr 2025 19:04:34 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.44.0
	  Hostname:    ha-089400-m04
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 5f54149732454703b65b218be30d4d33
	  System UUID:                e095ded6-153c-e646-b701-dc81bc9c4bc9
	  Boot ID:                    019cb181-c503-4c56-a299-525f3ebd699a
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.3.0/24
	PodCIDRs:                     10.244.3.0/24
	Non-terminated Pods:          (2 in total)
	  Namespace                   Name                CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                ------------  ----------  ---------------  -------------  ---
	  kube-system                 kindnet-rpxg2       100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      13m
	  kube-system                 kube-proxy-772n2    0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (2%)  50Mi (2%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 13m                kube-proxy       
	  Normal  NodeHasSufficientMemory  13m (x8 over 13m)  kubelet          Node ha-089400-m04 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    13m (x8 over 13m)  kubelet          Node ha-089400-m04 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     13m (x7 over 13m)  kubelet          Node ha-089400-m04 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           13m                node-controller  Node ha-089400-m04 event: Registered Node ha-089400-m04 in Controller
	  Normal  RegisteredNode           13m                node-controller  Node ha-089400-m04 event: Registered Node ha-089400-m04 in Controller
	  Normal  RegisteredNode           13m                node-controller  Node ha-089400-m04 event: Registered Node ha-089400-m04 in Controller
	
	
	==> dmesg <==
	[  +6.924411] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000001] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 18:49] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.184710] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +31.673598] systemd-fstab-generator[1006]: Ignoring "noauto" option for root device
	[Apr 8 18:50] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.534954] systemd-fstab-generator[1044]: Ignoring "noauto" option for root device
	[  +0.194253] systemd-fstab-generator[1056]: Ignoring "noauto" option for root device
	[  +0.214985] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.864083] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.195304] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.207808] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.265895] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[ +11.074296] systemd-fstab-generator[1433]: Ignoring "noauto" option for root device
	[  +0.104184] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.621588] systemd-fstab-generator[1700]: Ignoring "noauto" option for root device
	[  +6.386990] systemd-fstab-generator[1854]: Ignoring "noauto" option for root device
	[  +0.102161] kauditd_printk_skb: 74 callbacks suppressed
	[  +6.073571] kauditd_printk_skb: 67 callbacks suppressed
	[  +3.465280] systemd-fstab-generator[2371]: Ignoring "noauto" option for root device
	[  +6.734586] kauditd_printk_skb: 17 callbacks suppressed
	[  +7.514381] kauditd_printk_skb: 29 callbacks suppressed
	[Apr 8 18:53] hrtimer: interrupt took 11100806 ns
	[Apr 8 18:54] kauditd_printk_skb: 26 callbacks suppressed
	
	
	==> etcd [dd0000ba2b8f] <==
	{"level":"info","ts":"2025-04-08T19:04:11.502440Z","caller":"traceutil/trace.go:171","msg":"trace[1527003619] transaction","detail":"{read_only:false; response_revision:2660; number_of_response:1; }","duration":"140.316632ms","start":"2025-04-08T19:04:11.362008Z","end":"2025-04-08T19:04:11.502324Z","steps":["trace[1527003619] 'process raft request'  (duration: 139.71073ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:17.771045Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"305.941656ms","expected-duration":"100ms","prefix":"read-only range ","request":"limit:1 serializable:true keys_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T19:04:17.771125Z","caller":"traceutil/trace.go:171","msg":"trace[2073551688] range","detail":"{range_begin:; range_end:; response_count:0; response_revision:2682; }","duration":"306.035956ms","start":"2025-04-08T19:04:17.465076Z","end":"2025-04-08T19:04:17.771112Z","steps":["trace[2073551688] 'range keys from in-memory index tree'  (duration: 305.913656ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:17.771226Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"268.731616ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/health\" ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T19:04:17.771272Z","caller":"traceutil/trace.go:171","msg":"trace[658827599] range","detail":"{range_begin:/registry/health; range_end:; response_count:0; response_revision:2682; }","duration":"268.804716ms","start":"2025-04-08T19:04:17.502460Z","end":"2025-04-08T19:04:17.771264Z","steps":["trace[658827599] 'range keys from in-memory index tree'  (duration: 267.372811ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:17.771578Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"207.410684ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" limit:1 ","response":"range_response_count:1 size:1110"}
	{"level":"info","ts":"2025-04-08T19:04:17.771629Z","caller":"traceutil/trace.go:171","msg":"trace[1971688561] range","detail":"{range_begin:/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath; range_end:; response_count:1; response_revision:2682; }","duration":"207.485984ms","start":"2025-04-08T19:04:17.564134Z","end":"2025-04-08T19:04:17.771620Z","steps":["trace[1971688561] 'range keys from in-memory index tree'  (duration: 205.691977ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:17.771921Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"169.33034ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/minions/ha-089400-m04\" limit:1 ","response":"range_response_count:1 size:3111"}
	{"level":"info","ts":"2025-04-08T19:04:17.771970Z","caller":"traceutil/trace.go:171","msg":"trace[827494005] range","detail":"{range_begin:/registry/minions/ha-089400-m04; range_end:; response_count:1; response_revision:2682; }","duration":"169.396641ms","start":"2025-04-08T19:04:17.602565Z","end":"2025-04-08T19:04:17.771962Z","steps":["trace[827494005] 'range keys from in-memory index tree'  (duration: 167.806734ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:40.124696Z","caller":"etcdserver/raft.go:426","msg":"leader failed to send out heartbeat on time; took too long, leader is overloaded likely from slow disk","to":"c6df58ea03d5f9eb","heartbeat-interval":"100ms","expected-duration":"200ms","exceeded-duration":"131.751939ms"}
	{"level":"warn","ts":"2025-04-08T19:04:40.124812Z","caller":"etcdserver/raft.go:426","msg":"leader failed to send out heartbeat on time; took too long, leader is overloaded likely from slow disk","to":"a471cd1dbcc1855","heartbeat-interval":"100ms","expected-duration":"200ms","exceeded-duration":"131.873339ms"}
	{"level":"info","ts":"2025-04-08T19:04:40.127640Z","caller":"traceutil/trace.go:171","msg":"trace[145289096] linearizableReadLoop","detail":"{readStateIndex:3268; appliedIndex:3268; }","duration":"221.571828ms","start":"2025-04-08T19:04:39.906050Z","end":"2025-04-08T19:04:40.127622Z","steps":["trace[145289096] 'read index received'  (duration: 221.564028ms)","trace[145289096] 'applied index is now lower than readState.Index'  (duration: 5.7µs)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T19:04:40.138660Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"133.201298ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath\" limit:1 ","response":"range_response_count:1 size:1110"}
	{"level":"info","ts":"2025-04-08T19:04:40.138745Z","caller":"traceutil/trace.go:171","msg":"trace[891175719] range","detail":"{range_begin:/registry/services/endpoints/kube-system/k8s.io-minikube-hostpath; range_end:; response_count:1; response_revision:2747; }","duration":"133.418898ms","start":"2025-04-08T19:04:40.005315Z","end":"2025-04-08T19:04:40.138733Z","steps":["trace[891175719] 'agreement among raft nodes before linearized reading'  (duration: 133.268697ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T19:04:40.138666Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"232.606269ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/minions/ha-089400\" limit:1 ","response":"range_response_count:1 size:4763"}
	{"level":"info","ts":"2025-04-08T19:04:40.139054Z","caller":"traceutil/trace.go:171","msg":"trace[1988163678] range","detail":"{range_begin:/registry/minions/ha-089400; range_end:; response_count:1; response_revision:2747; }","duration":"233.02917ms","start":"2025-04-08T19:04:39.906015Z","end":"2025-04-08T19:04:40.139044Z","steps":["trace[1988163678] 'agreement among raft nodes before linearized reading'  (duration: 221.722128ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T19:05:29.493305Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":2018}
	{"level":"info","ts":"2025-04-08T19:05:29.549538Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":2018,"took":"55.481902ms","hash":3832585417,"current-db-size-bytes":3710976,"current-db-size":"3.7 MB","current-db-size-in-use-bytes":2478080,"current-db-size-in-use":"2.5 MB"}
	{"level":"info","ts":"2025-04-08T19:05:29.549654Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3832585417,"revision":2018,"compact-revision":1084}
	{"level":"info","ts":"2025-04-08T19:10:29.526810Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":2872}
	{"level":"info","ts":"2025-04-08T19:10:29.577371Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":2872,"took":"49.790762ms","hash":1239063887,"current-db-size-bytes":3710976,"current-db-size":"3.7 MB","current-db-size-in-use-bytes":2326528,"current-db-size-in-use":"2.3 MB"}
	{"level":"info","ts":"2025-04-08T19:10:29.577735Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":1239063887,"revision":2872,"compact-revision":2018}
	{"level":"info","ts":"2025-04-08T19:15:29.553621Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":3616}
	{"level":"info","ts":"2025-04-08T19:15:29.592317Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":3616,"took":"37.688114ms","hash":1414659339,"current-db-size-bytes":3710976,"current-db-size":"3.7 MB","current-db-size-in-use-bytes":1986560,"current-db-size-in-use":"2.0 MB"}
	{"level":"info","ts":"2025-04-08T19:15:29.593157Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":1414659339,"revision":3616,"compact-revision":2872}
	
	
	==> kernel <==
	 19:17:48 up 29 min,  0 users,  load average: 1.06, 0.79, 0.58
	Linux ha-089400 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [ea0911610b90] <==
	I0408 19:17:10.898517       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:17:20.893133       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:17:20.893382       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:17:20.893734       1 main.go:297] Handling node with IPs: map[172.22.44.0:{}]
	I0408 19:17:20.893988       1 main.go:324] Node ha-089400-m04 has CIDR [10.244.3.0/24] 
	I0408 19:17:20.894111       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:17:20.894124       1 main.go:301] handling current node
	I0408 19:17:20.894140       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:17:20.894145       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:17:30.895624       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:17:30.896049       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:17:30.896914       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:17:30.896932       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:17:30.897278       1 main.go:297] Handling node with IPs: map[172.22.44.0:{}]
	I0408 19:17:30.897695       1 main.go:324] Node ha-089400-m04 has CIDR [10.244.3.0/24] 
	I0408 19:17:30.898615       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:17:30.898840       1 main.go:301] handling current node
	I0408 19:17:40.901424       1 main.go:297] Handling node with IPs: map[172.22.47.59:{}]
	I0408 19:17:40.901536       1 main.go:301] handling current node
	I0408 19:17:40.901558       1 main.go:297] Handling node with IPs: map[172.22.34.212:{}]
	I0408 19:17:40.901567       1 main.go:324] Node ha-089400-m02 has CIDR [10.244.1.0/24] 
	I0408 19:17:40.901911       1 main.go:297] Handling node with IPs: map[172.22.44.49:{}]
	I0408 19:17:40.901944       1 main.go:324] Node ha-089400-m03 has CIDR [10.244.2.0/24] 
	I0408 19:17:40.902145       1 main.go:297] Handling node with IPs: map[172.22.44.0:{}]
	I0408 19:17:40.902179       1 main.go:324] Node ha-089400-m04 has CIDR [10.244.3.0/24] 
	
	
	==> kube-apiserver [f13d6fc3e949] <==
	I0408 18:50:35.714862       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 18:50:35.761737       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 18:50:35.789532       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 18:50:39.820339       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	I0408 18:50:40.329216       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	E0408 18:58:18.314332       1 finisher.go:175] "Unhandled Error" err="FinishRequest: post-timeout activity - time-elapsed: 11.1µs, panicked: false, err: context canceled, panic-reason: <nil>" logger="UnhandledError"
	E0408 18:58:18.314713       1 writers.go:123] "Unhandled Error" err="apiserver was unable to write a JSON response: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.316201       1 status.go:71] "Unhandled Error" err="apiserver received an error that is not an metav1.Status: &errors.errorString{s:\"http: Handler timeout\"}: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.317424       1 writers.go:136] "Unhandled Error" err="apiserver was unable to write a fallback JSON response: http: Handler timeout" logger="UnhandledError"
	E0408 18:58:18.322650       1 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="53.458932ms" method="PATCH" path="/api/v1/namespaces/default/events/ha-089400-m03.18346ce7874a75d5" result=null
	E0408 18:59:43.075414       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58299: use of closed network connection
	E0408 18:59:44.684313       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58301: use of closed network connection
	E0408 18:59:45.292441       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58304: use of closed network connection
	E0408 18:59:45.888901       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58306: use of closed network connection
	E0408 18:59:46.543609       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58308: use of closed network connection
	E0408 18:59:47.050078       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58310: use of closed network connection
	E0408 18:59:47.598997       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58312: use of closed network connection
	E0408 18:59:48.143315       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58314: use of closed network connection
	E0408 18:59:48.674108       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58316: use of closed network connection
	E0408 18:59:49.679279       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58319: use of closed network connection
	E0408 19:00:00.211405       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58321: use of closed network connection
	E0408 19:00:00.761286       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58325: use of closed network connection
	E0408 19:00:11.285375       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58327: use of closed network connection
	E0408 19:00:11.809013       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58330: use of closed network connection
	E0408 19:00:22.351099       1 conn.go:339] Error on socket receive: read tcp 172.22.47.254:8443->172.22.32.1:58332: use of closed network connection
	
	
	==> kube-controller-manager [64cc2cd29fdc] <==
	I0408 19:04:02.712748       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:03.106151       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:03.816653       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:04.767058       1 node_lifecycle_controller.go:886] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="ha-089400-m04"
	I0408 19:04:05.155061       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:05.623294       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:05.732415       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:06.383631       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:06.497273       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:12.715432       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:33.366314       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:34.733666       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:34.742566       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="ha-089400-m04"
	I0408 19:04:34.768757       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:34.805426       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:04:35.032642       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 19:05:22.067880       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m02"
	I0408 19:08:17.177309       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400"
	I0408 19:09:17.329624       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:09:40.725760       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 19:10:27.421357       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m02"
	I0408 19:13:23.366699       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400"
	I0408 19:14:22.719027       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m04"
	I0408 19:14:47.460069       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m03"
	I0408 19:15:34.022884       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="ha-089400-m02"
	
	
	==> kube-proxy [1fa28a71c2bd] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 18:50:42.911256       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 18:50:42.970229       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.47.59"]
	E0408 18:50:42.971294       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 18:50:43.056321       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 18:50:43.056447       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 18:50:43.056480       1 server_linux.go:170] "Using iptables Proxier"
	I0408 18:50:43.061345       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 18:50:43.063633       1 server.go:497] "Version info" version="v1.32.2"
	I0408 18:50:43.063672       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 18:50:43.074263       1 config.go:199] "Starting service config controller"
	I0408 18:50:43.074379       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 18:50:43.074474       1 config.go:105] "Starting endpoint slice config controller"
	I0408 18:50:43.074560       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 18:50:43.075266       1 config.go:329] "Starting node config controller"
	I0408 18:50:43.075300       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 18:50:43.175838       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 18:50:43.176118       1 shared_informer.go:320] Caches are synced for service config
	I0408 18:50:43.176268       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [6969b47aa676] <==
	E0408 18:58:17.724330       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod e9d6e73b-0bca-4d4c-a306-488d28a5ac38(kube-system/kindnet-jtrnl) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-jtrnl"
	E0408 18:58:17.724404       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod f17cfde2-a18e-418f-922f-8d36b3c2b976(kube-system/kube-proxy-cqx6b) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-cqx6b"
	E0408 18:58:17.726947       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-cqx6b\": pod kube-proxy-cqx6b is already assigned to node \"ha-089400-m03\"" pod="kube-system/kube-proxy-cqx6b"
	I0408 18:58:17.728700       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-cqx6b" node="ha-089400-m03"
	E0408 18:58:17.726324       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-jtrnl\": pod kindnet-jtrnl is already assigned to node \"ha-089400-m03\"" pod="kube-system/kindnet-jtrnl"
	I0408 18:58:17.732385       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-jtrnl" node="ha-089400-m03"
	E0408 18:59:35.008691       1 schedule_one.go:954] "Scheduler cache AssumePod failed" err="pod 7ebebfbc-18d5-4df7-b3f4-dd8565c43b81(default/busybox-58667487b6-snc97) is in the cache, so can't be assumed" pod="default/busybox-58667487b6-snc97"
	E0408 18:59:35.009157       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="pod 7ebebfbc-18d5-4df7-b3f4-dd8565c43b81(default/busybox-58667487b6-snc97) is in the cache, so can't be assumed" pod="default/busybox-58667487b6-snc97"
	I0408 18:59:35.009461       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-snc97" node="ha-089400-m03"
	E0408 18:59:35.231897       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-lwn24\": pod busybox-58667487b6-lwn24 is already assigned to node \"ha-089400-m02\"" plugin="DefaultBinder" pod="default/busybox-58667487b6-lwn24" node="ha-089400-m02"
	E0408 18:59:35.232941       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod f1ce7e68-1e3b-4fe5-a9ca-3f45c7bc1954(default/busybox-58667487b6-lwn24) wasn't assumed so cannot be forgotten" pod="default/busybox-58667487b6-lwn24"
	E0408 18:59:35.233205       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-lwn24\": pod busybox-58667487b6-lwn24 is already assigned to node \"ha-089400-m02\"" pod="default/busybox-58667487b6-lwn24"
	I0408 18:59:35.233697       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-lwn24" node="ha-089400-m02"
	E0408 18:59:35.236974       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-d76nt\": pod busybox-58667487b6-d76nt is already assigned to node \"ha-089400\"" plugin="DefaultBinder" pod="default/busybox-58667487b6-d76nt" node="ha-089400"
	E0408 18:59:35.239920       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod 76d5dd43-6aff-4922-b3a8-3663f5c88670(default/busybox-58667487b6-d76nt) wasn't assumed so cannot be forgotten" pod="default/busybox-58667487b6-d76nt"
	E0408 18:59:35.240159       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"busybox-58667487b6-d76nt\": pod busybox-58667487b6-d76nt is already assigned to node \"ha-089400\"" pod="default/busybox-58667487b6-d76nt"
	I0408 18:59:35.240386       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="default/busybox-58667487b6-d76nt" node="ha-089400"
	E0408 19:04:02.875682       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kube-proxy-xj9nw\": pod kube-proxy-xj9nw is already assigned to node \"ha-089400-m04\"" plugin="DefaultBinder" pod="kube-system/kube-proxy-xj9nw" node="ha-089400-m04"
	E0408 19:04:02.876553       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod f74c7107-6390-49eb-a041-622b8bce6d00(kube-system/kube-proxy-xj9nw) wasn't assumed so cannot be forgotten" pod="kube-system/kube-proxy-xj9nw"
	E0408 19:04:02.876712       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kube-proxy-xj9nw\": pod kube-proxy-xj9nw is already assigned to node \"ha-089400-m04\"" pod="kube-system/kube-proxy-xj9nw"
	I0408 19:04:02.876797       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kube-proxy-xj9nw" node="ha-089400-m04"
	E0408 19:04:02.876472       1 framework.go:1316] "Plugin Failed" err="Operation cannot be fulfilled on pods/binding \"kindnet-jbc5p\": pod kindnet-jbc5p is already assigned to node \"ha-089400-m04\"" plugin="DefaultBinder" pod="kube-system/kindnet-jbc5p" node="ha-089400-m04"
	E0408 19:04:02.877461       1 schedule_one.go:359] "scheduler cache ForgetPod failed" err="pod 96cab00e-f5eb-418f-90f5-623f14ef6175(kube-system/kindnet-jbc5p) wasn't assumed so cannot be forgotten" pod="kube-system/kindnet-jbc5p"
	E0408 19:04:02.879995       1 schedule_one.go:1058] "Error scheduling pod; retrying" err="running Bind plugin \"DefaultBinder\": Operation cannot be fulfilled on pods/binding \"kindnet-jbc5p\": pod kindnet-jbc5p is already assigned to node \"ha-089400-m04\"" pod="kube-system/kindnet-jbc5p"
	I0408 19:04:02.880151       1 schedule_one.go:1071] "Pod has been assigned to node. Abort adding it back to queue." pod="kube-system/kindnet-jbc5p" node="ha-089400-m04"
	
	
	==> kubelet <==
	Apr 08 19:13:35 ha-089400 kubelet[2378]: E0408 19:13:35.798677    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:13:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:13:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:13:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:13:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:14:35 ha-089400 kubelet[2378]: E0408 19:14:35.798292    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:14:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:14:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:14:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:14:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:15:35 ha-089400 kubelet[2378]: E0408 19:15:35.799919    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:15:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:15:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:15:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:15:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:16:35 ha-089400 kubelet[2378]: E0408 19:16:35.800423    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:16:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:16:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:16:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:16:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:17:35 ha-089400 kubelet[2378]: E0408 19:17:35.797488    2378 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:17:35 ha-089400 kubelet[2378]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:17:35 ha-089400 kubelet[2378]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:17:35 ha-089400 kubelet[2378]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:17:35 ha-089400 kubelet[2378]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p ha-089400 -n ha-089400
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p ha-089400 -n ha-089400: (12.696804s)
helpers_test.go:261: (dbg) Run:  kubectl --context ha-089400 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiControlPlane/serial/StopSecondaryNode FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiControlPlane/serial/StopSecondaryNode (48.85s)

                                                
                                    
x
+
TestMultiNode/serial/FreshStart2Nodes (488.02s)

                                                
                                                
=== RUN   TestMultiNode/serial/FreshStart2Nodes
multinode_test.go:96: (dbg) Run:  out/minikube-windows-amd64.exe start -p multinode-095200 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=hyperv
E0408 19:51:05.465865    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:51:52.263502    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:56:05.468247    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
multinode_test.go:96: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p multinode-095200 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=hyperv: exit status 90 (7m28.8210797s)

                                                
                                                
-- stdout --
	* [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	* Using the hyperv driver based on user configuration
	* Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	* Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	* Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	  - Generating certificates and keys ...
	  - Booting up control plane ...
	  - Configuring RBAC rules ...
	* Configuring CNI (Container Networking Interface) ...
	* Verifying Kubernetes components...
	  - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	* Enabled addons: storage-provisioner, default-storageclass
	
	* Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	* Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	* Found network options:
	  - NO_PROXY=172.22.37.202
	  - NO_PROXY=172.22.37.202
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	* To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	* To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	* 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 

                                                
                                                
** /stderr **
multinode_test.go:98: failed to start cluster. args "out/minikube-windows-amd64.exe start -p multinode-095200 --wait=true --memory=2200 --nodes=2 -v=8 --alsologtostderr --driver=hyperv" : exit status 90
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (13.0127326s)
helpers_test.go:244: <<< TestMultiNode/serial/FreshStart2Nodes FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/FreshStart2Nodes]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
E0408 19:56:52.265415    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (9.3091185s)
helpers_test.go:252: TestMultiNode/serial/FreshStart2Nodes logs: 
-- stdout --
	
	==> Audit <==
	|---------|-------------------------------------------|--------------------------|-------------------|---------|---------------------|---------------------|
	| Command |                   Args                    |         Profile          |       User        | Version |     Start Time      |      End Time       |
	|---------|-------------------------------------------|--------------------------|-------------------|---------|---------------------|---------------------|
	| pause   | -p json-output-059800                     | json-output-059800       | testUser          | v1.35.0 | 08 Apr 25 19:29 UTC | 08 Apr 25 19:29 UTC |
	|         | --output=json --user=testUser             |                          |                   |         |                     |                     |
	| unpause | -p json-output-059800                     | json-output-059800       | testUser          | v1.35.0 | 08 Apr 25 19:29 UTC | 08 Apr 25 19:29 UTC |
	|         | --output=json --user=testUser             |                          |                   |         |                     |                     |
	| stop    | -p json-output-059800                     | json-output-059800       | testUser          | v1.35.0 | 08 Apr 25 19:29 UTC | 08 Apr 25 19:30 UTC |
	|         | --output=json --user=testUser             |                          |                   |         |                     |                     |
	| delete  | -p json-output-059800                     | json-output-059800       | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:30 UTC | 08 Apr 25 19:30 UTC |
	| start   | -p json-output-error-393600               | json-output-error-393600 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:30 UTC |                     |
	|         | --memory=2200 --output=json               |                          |                   |         |                     |                     |
	|         | --wait=true --driver=fail                 |                          |                   |         |                     |                     |
	| delete  | -p json-output-error-393600               | json-output-error-393600 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:30 UTC | 08 Apr 25 19:30 UTC |
	| start   | -p first-128400                           | first-128400             | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:30 UTC | 08 Apr 25 19:33 UTC |
	|         | --driver=hyperv                           |                          |                   |         |                     |                     |
	| start   | -p second-128400                          | second-128400            | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:33 UTC | 08 Apr 25 19:37 UTC |
	|         | --driver=hyperv                           |                          |                   |         |                     |                     |
	| delete  | -p second-128400                          | second-128400            | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:37 UTC | 08 Apr 25 19:38 UTC |
	| delete  | -p first-128400                           | first-128400             | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:38 UTC | 08 Apr 25 19:39 UTC |
	| start   | -p mount-start-1-514700                   | mount-start-1-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:39 UTC | 08 Apr 25 19:41 UTC |
	|         | --memory=2048 --mount                     |                          |                   |         |                     |                     |
	|         | --mount-gid 0 --mount-msize               |                          |                   |         |                     |                     |
	|         | 6543 --mount-port 46464                   |                          |                   |         |                     |                     |
	|         | --mount-uid 0 --no-kubernetes             |                          |                   |         |                     |                     |
	|         | --driver=hyperv                           |                          |                   |         |                     |                     |
	| mount   | C:\Users\jenkins.minikube3:/minikube-host | mount-start-1-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:41 UTC |                     |
	|         | --profile mount-start-1-514700 --v 0      |                          |                   |         |                     |                     |
	|         | --9p-version 9p2000.L --gid 0 --ip        |                          |                   |         |                     |                     |
	|         | --msize 6543 --port 46464 --type 9p --uid |                          |                   |         |                     |                     |
	|         |                                         0 |                          |                   |         |                     |                     |
	| ssh     | mount-start-1-514700 ssh -- ls            | mount-start-1-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:41 UTC | 08 Apr 25 19:42 UTC |
	|         | /minikube-host                            |                          |                   |         |                     |                     |
	| start   | -p mount-start-2-514700                   | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:42 UTC | 08 Apr 25 19:44 UTC |
	|         | --memory=2048 --mount                     |                          |                   |         |                     |                     |
	|         | --mount-gid 0 --mount-msize               |                          |                   |         |                     |                     |
	|         | 6543 --mount-port 46465                   |                          |                   |         |                     |                     |
	|         | --mount-uid 0 --no-kubernetes             |                          |                   |         |                     |                     |
	|         | --driver=hyperv                           |                          |                   |         |                     |                     |
	| mount   | C:\Users\jenkins.minikube3:/minikube-host | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:44 UTC |                     |
	|         | --profile mount-start-2-514700 --v 0      |                          |                   |         |                     |                     |
	|         | --9p-version 9p2000.L --gid 0 --ip        |                          |                   |         |                     |                     |
	|         | --msize 6543 --port 46465 --type 9p --uid |                          |                   |         |                     |                     |
	|         |                                         0 |                          |                   |         |                     |                     |
	| ssh     | mount-start-2-514700 ssh -- ls            | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:44 UTC | 08 Apr 25 19:44 UTC |
	|         | /minikube-host                            |                          |                   |         |                     |                     |
	| delete  | -p mount-start-1-514700                   | mount-start-1-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:44 UTC | 08 Apr 25 19:45 UTC |
	|         | --alsologtostderr -v=5                    |                          |                   |         |                     |                     |
	| ssh     | mount-start-2-514700 ssh -- ls            | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:45 UTC | 08 Apr 25 19:45 UTC |
	|         | /minikube-host                            |                          |                   |         |                     |                     |
	| stop    | -p mount-start-2-514700                   | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:45 UTC | 08 Apr 25 19:46 UTC |
	| start   | -p mount-start-2-514700                   | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:46 UTC | 08 Apr 25 19:48 UTC |
	| mount   | C:\Users\jenkins.minikube3:/minikube-host | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC |                     |
	|         | --profile mount-start-2-514700 --v 0      |                          |                   |         |                     |                     |
	|         | --9p-version 9p2000.L --gid 0 --ip        |                          |                   |         |                     |                     |
	|         | --msize 6543 --port 46465 --type 9p --uid |                          |                   |         |                     |                     |
	|         |                                         0 |                          |                   |         |                     |                     |
	| ssh     | mount-start-2-514700 ssh -- ls            | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC | 08 Apr 25 19:48 UTC |
	|         | /minikube-host                            |                          |                   |         |                     |                     |
	| delete  | -p mount-start-2-514700                   | mount-start-2-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC | 08 Apr 25 19:48 UTC |
	| delete  | -p mount-start-1-514700                   | mount-start-1-514700     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:49 UTC | 08 Apr 25 19:49 UTC |
	| start   | -p multinode-095200                       | multinode-095200         | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:49 UTC |                     |
	|         | --wait=true --memory=2200                 |                          |                   |         |                     |                     |
	|         | --nodes=2 -v=8                            |                          |                   |         |                     |                     |
	|         | --alsologtostderr                         |                          |                   |         |                     |                     |
	|         | --driver=hyperv                           |                          |                   |         |                     |                     |
	|---------|-------------------------------------------|--------------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:28 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:28.096537979Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:28 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:28Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/b2bab718ffeb8618ae03e4a9b80d6f81a26b972d57a7b3d7543293aeb510d9a8/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:35 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:35Z" level=info msg="Stop pulling image docker.io/kindest/kindnetd:v20250214-acbabc1a: Status: Downloaded newer image for kindest/kindnetd:v20250214-acbabc1a"
	Apr 08 19:52:35 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:35.939688266Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:35 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:35.939761565Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:35 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:35.939779765Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:35 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:35.940770059Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.136399639Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.136611638Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138127036Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                      CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                              4 minutes ago       Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                              4 minutes ago       Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495   4 minutes ago       Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                              4 minutes ago       Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                              4 minutes ago       Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                              4 minutes ago       Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                              4 minutes ago       Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                              4 minutes ago       Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 19:56:46 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 19:52:50 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 19:52:50 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 19:52:50 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 19:52:50 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (8 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     4m27s
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         4m34s
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      4m27s
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         4m32s
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         4m32s
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m27s
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         4m32s
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         4m19s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age    From             Message
	  ----    ------                   ----   ----             -------
	  Normal  Starting                 4m24s  kube-proxy       
	  Normal  Starting                 4m33s  kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  4m32s  kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  4m32s  kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    4m32s  kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     4m32s  kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           4m28s  node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                4m4s   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	==> dmesg <==
	[  +1.392271] systemd-fstab-generator[115]: Ignoring "noauto" option for root device
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T19:52:13.535815Z","caller":"embed/etcd.go:600","msg":"serving peer traffic","address":"172.22.37.202:2380"}
	{"level":"info","ts":"2025-04-08T19:52:13.537882Z","caller":"embed/etcd.go:572","msg":"cmux::serve","address":"172.22.37.202:2380"}
	{"level":"info","ts":"2025-04-08T19:52:13.544245Z","caller":"embed/etcd.go:280","msg":"now serving peer/client/metrics","local-member-id":"fd713bd18919f474","initial-advertise-peer-urls":["https://172.22.37.202:2380"],"listen-peer-urls":["https://172.22.37.202:2380"],"advertise-client-urls":["https://172.22.37.202:2379"],"listen-client-urls":["https://127.0.0.1:2379","https://172.22.37.202:2379"],"listen-metrics-urls":["http://127.0.0.1:2381"]}
	{"level":"info","ts":"2025-04-08T19:52:13.547388Z","caller":"embed/etcd.go:871","msg":"serving metrics","address":"http://127.0.0.1:2381"}
	{"level":"info","ts":"2025-04-08T19:52:13.626184Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 is starting a new election at term 1"}
	{"level":"info","ts":"2025-04-08T19:52:13.626576Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became pre-candidate at term 1"}
	{"level":"info","ts":"2025-04-08T19:52:13.626757Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgPreVoteResp from fd713bd18919f474 at term 1"}
	{"level":"info","ts":"2025-04-08T19:52:13.626926Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became candidate at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627134Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgVoteResp from fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627369Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became leader at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627508Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: fd713bd18919f474 elected leader fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.633658Z","caller":"etcdserver/server.go:2140","msg":"published local member to cluster through raft","local-member-id":"fd713bd18919f474","local-member-attributes":"{Name:multinode-095200 ClientURLs:[https://172.22.37.202:2379]}","request-path":"/0/members/fd713bd18919f474/attributes","cluster-id":"720e62ee34e6c5c","publish-timeout":"7s"}
	{"level":"info","ts":"2025-04-08T19:52:13.633743Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.633703Z","caller":"etcdserver/server.go:2651","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.637216Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.643734Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.645529Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.648744Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"172.22.37.202:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.652143Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.652359Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.653175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.653497Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"720e62ee34e6c5c","local-member-id":"fd713bd18919f474","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653612Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653984Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:53:04.028898Z","caller":"traceutil/trace.go:171","msg":"trace[1500643244] transaction","detail":"{read_only:false; response_revision:427; number_of_response:1; }","duration":"208.713694ms","start":"2025-04-08T19:53:03.820165Z","end":"2025-04-08T19:53:04.028879Z","steps":["trace[1500643244] 'process raft request'  (duration: 208.58869ms)"],"step_count":1}
	
	
	==> kernel <==
	 19:56:53 up 6 min,  0 users,  load average: 0.90, 0.64, 0.32
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 19:54:47.237341       1 main.go:301] handling current node
	I0408 19:54:57.236802       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:54:57.236907       1 main.go:301] handling current node
	I0408 19:55:07.227491       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:07.227862       1 main.go:301] handling current node
	I0408 19:55:17.236376       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:17.236429       1 main.go:301] handling current node
	I0408 19:55:27.236680       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:27.236894       1 main.go:301] handling current node
	I0408 19:55:37.227504       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:37.227596       1 main.go:301] handling current node
	I0408 19:55:47.236092       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:47.236189       1 main.go:301] handling current node
	I0408 19:55:57.230950       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:55:57.230991       1 main.go:301] handling current node
	I0408 19:56:07.227024       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:56:07.227147       1 main.go:301] handling current node
	I0408 19:56:17.232230       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:56:17.232332       1 main.go:301] handling current node
	I0408 19:56:27.226746       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:56:27.226955       1 main.go:301] handling current node
	I0408 19:56:37.226752       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:56:37.226812       1 main.go:301] handling current node
	I0408 19:56:47.231267       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 19:56:47.231414       1 main.go:301] handling current node
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.718114       1 shared_informer.go:320] Caches are synced for *generic.policySource[*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicy,*k8s.io/api/admissionregistration/v1.ValidatingAdmissionPolicyBinding,k8s.io/apiserver/pkg/admission/plugin/policy/validating.Validator]
	I0408 19:52:16.718137       1 policy_source.go:240] refreshing policies
	I0408 19:52:16.718210       1 handler_discovery.go:451] Starting ResourceDiscoveryManager
	I0408 19:52:16.725276       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0408 19:52:16.743645       1 controller.go:615] quota admission added evaluator for: namespaces
	I0408 19:52:16.798487       1 aggregator.go:171] initial CRD sync complete...
	I0408 19:52:16.798547       1 autoregister_controller.go:144] Starting autoregister controller
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 19:52:24.269911       1 range_allocator.go:183] "Starting range CIDR allocator" logger="node-ipam-controller"
	I0408 19:52:24.269936       1 shared_informer.go:313] Waiting for caches to sync for cidrallocator
	I0408 19:52:24.269958       1 shared_informer.go:320] Caches are synced for cidrallocator
	I0408 19:52:24.274194       1 shared_informer.go:320] Caches are synced for resource quota
	I0408 19:52:24.274783       1 shared_informer.go:320] Caches are synced for stateful set
	I0408 19:52:24.289779       1 range_allocator.go:428] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-095200" podCIDRs=["10.244.0.0/24"]
	I0408 19:52:24.289839       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:24.289862       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:24.926201       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:25.859986       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="881.56052ms"
	I0408 19:52:25.943649       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="83.515777ms"
	I0408 19:52:25.945647       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="141.399µs"
	I0408 19:52:26.006473       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="91.099µs"
	I0408 19:52:26.318629       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="54.168523ms"
	I0408 19:52:26.348330       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="29.55473ms"
	I0408 19:52:26.348824       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="41.1µs"
	I0408 19:52:48.460092       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.478420       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.517527       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="207.199µs"
	I0408 19:52:48.565559       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="235.1µs"
	I0408 19:52:49.235869       1 node_lifecycle_controller.go:1057] "Controller detected that some Nodes are Ready. Exiting master disruption mode" logger="node-lifecycle-controller"
	I0408 19:52:50.794603       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:50.874586       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="56.401µs"
	I0408 19:52:50.925812       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="25.010294ms"
	I0408 19:52:50.926517       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="48.701µs"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 19:52:48 multinode-095200 kubelet[2271]: I0408 19:52:48.620769    2271 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/host-path/b68a8994-c73c-4400-bed6-5644b0542cde-tmp\") pod \"storage-provisioner\" (UID: \"b68a8994-c73c-4400-bed6-5644b0542cde\") " pod="kube-system/storage-provisioner"
	Apr 08 19:52:48 multinode-095200 kubelet[2271]: I0408 19:52:48.620868    2271 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkdb9\" (UniqueName: \"kubernetes.io/projected/b68a8994-c73c-4400-bed6-5644b0542cde-kube-api-access-kkdb9\") pod \"storage-provisioner\" (UID: \"b68a8994-c73c-4400-bed6-5644b0542cde\") " pod="kube-system/storage-provisioner"
	Apr 08 19:52:48 multinode-095200 kubelet[2271]: I0408 19:52:48.620894    2271 reconciler_common.go:251] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/7c6e4abc-5e41-4396-8886-075ba936e08e-config-volume\") pod \"coredns-668d6bf9bc-4tn68\" (UID: \"7c6e4abc-5e41-4396-8886-075ba936e08e\") " pod="kube-system/coredns-668d6bf9bc-4tn68"
	Apr 08 19:52:50 multinode-095200 kubelet[2271]: I0408 19:52:50.873204    2271 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/storage-provisioner" podStartSLOduration=17.873167871 podStartE2EDuration="17.873167871s" podCreationTimestamp="2025-04-08 19:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-04-08 19:52:49.847451345 +0000 UTC m=+30.099486180" watchObservedRunningTime="2025-04-08 19:52:50.873167871 +0000 UTC m=+31.125202806"
	Apr 08 19:52:50 multinode-095200 kubelet[2271]: I0408 19:52:50.873396    2271 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="kube-system/coredns-668d6bf9bc-4tn68" podStartSLOduration=25.873385677999998 podStartE2EDuration="25.873385678s" podCreationTimestamp="2025-04-08 19:52:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-04-08 19:52:50.872540651 +0000 UTC m=+31.124575486" watchObservedRunningTime="2025-04-08 19:52:50.873385678 +0000 UTC m=+31.125420613"
	Apr 08 19:53:20 multinode-095200 kubelet[2271]: E0408 19:53:20.021024    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:53:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:53:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:53:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:53:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:54:20 multinode-095200 kubelet[2271]: E0408 19:54:20.021708    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:54:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:54:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:54:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:54:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:55:20 multinode-095200 kubelet[2271]: E0408 19:55:20.021569    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:55:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:55:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:55:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:55:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 19:56:20 multinode-095200 kubelet[2271]: E0408 19:56:20.022844    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 19:56:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 19:56:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 19:56:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 19:56:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	
	
	==> storage-provisioner [9927dca24957] <==
	I0408 19:52:49.620555       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0408 19:52:49.635491       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0408 19:52:49.635566       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0408 19:52:49.674217       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0408 19:52:49.675661       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"7dd83298-d9cf-49b5-b6a4-4a4373a2ee21", APIVersion:"v1", ResourceVersion:"404", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099 became leader
	I0408 19:52:49.675918       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	I0408 19:52:49.776790       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (13.1627266s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiNode/serial/FreshStart2Nodes FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/FreshStart2Nodes (488.02s)

                                                
                                    
x
+
TestMultiNode/serial/DeployApp2Nodes (750.63s)

                                                
                                                
=== RUN   TestMultiNode/serial/DeployApp2Nodes
multinode_test.go:493: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- apply -f ./testdata/multinodes/multinode-pod-dns-test.yaml
multinode_test.go:498: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- rollout status deployment/busybox
E0408 19:57:28.553061    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:01:05.470458    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:01:52.267749    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:03:15.361301    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:06:05.474072    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:06:52.270074    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
multinode_test.go:498: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- rollout status deployment/busybox: exit status 1 (10m4.8139253s)

                                                
                                                
-- stdout --
	Waiting for deployment "busybox" rollout to finish: 0 of 2 updated replicas are available...
	Waiting for deployment "busybox" rollout to finish: 1 of 2 updated replicas are available...

                                                
                                                
-- /stdout --
** stderr ** 
	error: deployment "busybox" exceeded its progress deadline

                                                
                                                
** /stderr **
multinode_test.go:500: failed to deploy busybox to multinode cluster
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:14.513700    7904 retry.go:31] will retry after 550.572654ms: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:15.508825    7904 retry.go:31] will retry after 1.172165562s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:17.148334    7904 retry.go:31] will retry after 1.160922497s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:18.735852    7904 retry.go:31] will retry after 3.828473318s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:23.015099    7904 retry.go:31] will retry after 3.026607236s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:26.496627    7904 retry.go:31] will retry after 9.576173667s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:36.516947    7904 retry.go:31] will retry after 8.441020713s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:45.402698    7904 retry.go:31] will retry after 9.071144806s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:07:54.895790    7904 retry.go:31] will retry after 21.15585991s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
I0408 20:08:16.531315    7904 retry.go:31] will retry after 40.601768991s: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:505: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].status.podIP}'
multinode_test.go:514: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:524: failed to resolve pod IPs: expected 2 Pod IPs but got 1 (may be temporary), output: "\n-- stdout --\n\t'10.244.0.3'\n\n-- /stdout --"
multinode_test.go:528: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:536: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Done: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- nslookup kubernetes.io: (1.8155527s)
multinode_test.go:536: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.io
multinode_test.go:536: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.io: exit status 1 (452.6561ms)

                                                
                                                
** stderr ** 
	Error from server (BadRequest): pod busybox-58667487b6-js7bg does not have a host assigned

                                                
                                                
** /stderr **
multinode_test.go:538: Pod busybox-58667487b6-js7bg could not resolve 'kubernetes.io': exit status 1
multinode_test.go:546: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.default
multinode_test.go:546: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.default: exit status 1 (498.913ms)

                                                
                                                
** stderr ** 
	Error from server (BadRequest): pod busybox-58667487b6-js7bg does not have a host assigned

                                                
                                                
** /stderr **
multinode_test.go:548: Pod busybox-58667487b6-js7bg could not resolve 'kubernetes.default': exit status 1
multinode_test.go:554: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.default.svc.cluster.local
multinode_test.go:554: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- nslookup kubernetes.default.svc.cluster.local: exit status 1 (450.6923ms)

                                                
                                                
** stderr ** 
	Error from server (BadRequest): pod busybox-58667487b6-js7bg does not have a host assigned

                                                
                                                
** /stderr **
multinode_test.go:556: Pod busybox-58667487b6-js7bg could not resolve local service (kubernetes.default.svc.cluster.local): exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (13.1209784s)
helpers_test.go:244: <<< TestMultiNode/serial/DeployApp2Nodes FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/DeployApp2Nodes]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (9.046473s)
helpers_test.go:252: TestMultiNode/serial/DeployApp2Nodes logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	| Command |                       Args                        |       Profile        |       User        | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	| mount   | C:\Users\jenkins.minikube3:/minikube-host         | mount-start-2-514700 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC |                     |
	|         | --profile mount-start-2-514700 --v 0              |                      |                   |         |                     |                     |
	|         | --9p-version 9p2000.L --gid 0 --ip                |                      |                   |         |                     |                     |
	|         | --msize 6543 --port 46465 --type 9p --uid         |                      |                   |         |                     |                     |
	|         |                                                 0 |                      |                   |         |                     |                     |
	| ssh     | mount-start-2-514700 ssh -- ls                    | mount-start-2-514700 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC | 08 Apr 25 19:48 UTC |
	|         | /minikube-host                                    |                      |                   |         |                     |                     |
	| delete  | -p mount-start-2-514700                           | mount-start-2-514700 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:48 UTC | 08 Apr 25 19:48 UTC |
	| delete  | -p mount-start-1-514700                           | mount-start-1-514700 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:49 UTC | 08 Apr 25 19:49 UTC |
	| start   | -p multinode-095200                               | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:49 UTC |                     |
	|         | --wait=true --memory=2200                         |                      |                   |         |                     |                     |
	|         | --nodes=2 -v=8                                    |                      |                   |         |                     |                     |
	|         | --alsologtostderr                                 |                      |                   |         |                     |                     |
	|         | --driver=hyperv                                   |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- apply -f                   | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC | 08 Apr 25 19:57 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- rollout                    | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC |                     |
	|         | status deployment/busybox                         |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | busybox-58667487b6-jn4np --                       |                      |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                      |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np --                       |                      |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                      |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np -- nslookup              |                      |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200     | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg -- nslookup              |                      |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                      |                   |         |                     |                     |
	|---------|---------------------------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310620301Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310792803Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310817803Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.311021905Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:11Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/24feeb6878af8f2e3d8a31bd0e2b179192ce02e8e20d04d4d523246b72a8ddfe/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 19:57:13 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:13Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268796369Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268860569Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268873669Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.269006570Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	81a5e3779adab       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   12 minutes ago      Running             busybox                   0                   24feeb6878af8       busybox-58667487b6-jn4np
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                                         16 minutes ago      Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                                         16 minutes ago      Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              16 minutes ago      Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                                         16 minutes ago      Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                                         17 minutes ago      Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                                         17 minutes ago      Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                                         17 minutes ago      Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                                         17 minutes ago      Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	[INFO] 10.244.0.3:48144 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000356702s
	[INFO] 10.244.0.3:34835 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 140 0.13279721s
	[INFO] 10.244.0.3:33803 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd 60 0.044468738s
	[INFO] 10.244.0.3:36247 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.087022914s
	[INFO] 10.244.0.3:44861 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000233501s
	[INFO] 10.244.0.3:35209 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.051064673s
	[INFO] 10.244.0.3:55261 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000271302s
	[INFO] 10.244.0.3:50891 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000226301s
	[INFO] 10.244.0.3:47466 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.029589859s
	[INFO] 10.244.0.3:56852 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000128201s
	[INFO] 10.244.0.3:53365 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000172101s
	[INFO] 10.244.0.3:58492 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000296702s
	[INFO] 10.244.0.3:41257 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000186601s
	[INFO] 10.244.0.3:33967 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000235602s
	[INFO] 10.244.0.3:34652 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000120301s
	[INFO] 10.244.0.3:42747 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000392603s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:09:21 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-jn4np                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         12m
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     16m
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         17m
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      16m
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         16m
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         16m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 16m   kube-proxy       
	  Normal  Starting                 17m   kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  17m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  17m   kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    17m   kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     17m   kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           17m   node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                16m   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	==> dmesg <==
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	[Apr 8 19:57] kauditd_printk_skb: 14 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T19:52:13.626757Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgPreVoteResp from fd713bd18919f474 at term 1"}
	{"level":"info","ts":"2025-04-08T19:52:13.626926Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became candidate at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627134Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgVoteResp from fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627369Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became leader at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627508Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: fd713bd18919f474 elected leader fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.633658Z","caller":"etcdserver/server.go:2140","msg":"published local member to cluster through raft","local-member-id":"fd713bd18919f474","local-member-attributes":"{Name:multinode-095200 ClientURLs:[https://172.22.37.202:2379]}","request-path":"/0/members/fd713bd18919f474/attributes","cluster-id":"720e62ee34e6c5c","publish-timeout":"7s"}
	{"level":"info","ts":"2025-04-08T19:52:13.633743Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.633703Z","caller":"etcdserver/server.go:2651","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.637216Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.643734Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.645529Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.648744Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"172.22.37.202:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.652143Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.652359Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.653175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.653497Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"720e62ee34e6c5c","local-member-id":"fd713bd18919f474","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653612Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653984Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:53:04.028898Z","caller":"traceutil/trace.go:171","msg":"trace[1500643244] transaction","detail":"{read_only:false; response_revision:427; number_of_response:1; }","duration":"208.713694ms","start":"2025-04-08T19:53:03.820165Z","end":"2025-04-08T19:53:04.028879Z","steps":["trace[1500643244] 'process raft request'  (duration: 208.58869ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:02:13.994704Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":650}
	{"level":"info","ts":"2025-04-08T20:02:14.012096Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":650,"took":"16.853983ms","hash":4142537579,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":2072576,"current-db-size-in-use":"2.1 MB"}
	{"level":"info","ts":"2025-04-08T20:02:14.012321Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4142537579,"revision":650,"compact-revision":-1}
	{"level":"info","ts":"2025-04-08T20:07:14.017211Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":892}
	{"level":"info","ts":"2025-04-08T20:07:14.030836Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":892,"took":"13.37027ms","hash":3377129079,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1581056,"current-db-size-in-use":"1.6 MB"}
	{"level":"info","ts":"2025-04-08T20:07:14.030906Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3377129079,"revision":892,"compact-revision":650}
	
	
	==> kernel <==
	 20:09:24 up 19 min,  0 users,  load average: 0.79, 1.12, 0.69
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 20:07:17.234688       1 main.go:301] handling current node
	I0408 20:07:27.227467       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:07:27.227570       1 main.go:301] handling current node
	I0408 20:07:37.227736       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:07:37.227889       1 main.go:301] handling current node
	I0408 20:07:47.229626       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:07:47.229768       1 main.go:301] handling current node
	I0408 20:07:57.227630       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:07:57.227764       1 main.go:301] handling current node
	I0408 20:08:07.230438       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:07.230598       1 main.go:301] handling current node
	I0408 20:08:17.236717       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:17.236980       1 main.go:301] handling current node
	I0408 20:08:27.226938       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:27.227173       1 main.go:301] handling current node
	I0408 20:08:37.227989       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:37.228026       1 main.go:301] handling current node
	I0408 20:08:47.231207       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:47.231337       1 main.go:301] handling current node
	I0408 20:08:57.227527       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:57.227912       1 main.go:301] handling current node
	I0408 20:09:07.226885       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:07.226941       1 main.go:301] handling current node
	I0408 20:09:17.231119       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:17.231283       1 main.go:301] handling current node
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.725276       1 shared_informer.go:320] Caches are synced for cluster_authentication_trust_controller
	I0408 19:52:16.743645       1 controller.go:615] quota admission added evaluator for: namespaces
	I0408 19:52:16.798487       1 aggregator.go:171] initial CRD sync complete...
	I0408 19:52:16.798547       1 autoregister_controller.go:144] Starting autoregister controller
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0408 20:08:59.828087       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59231: use of closed network connection
	E0408 20:09:00.905990       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59236: use of closed network connection
	E0408 20:09:01.943484       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59241: use of closed network connection
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 19:52:25.943649       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="83.515777ms"
	I0408 19:52:25.945647       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="141.399µs"
	I0408 19:52:26.006473       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="91.099µs"
	I0408 19:52:26.318629       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="54.168523ms"
	I0408 19:52:26.348330       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="29.55473ms"
	I0408 19:52:26.348824       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="41.1µs"
	I0408 19:52:48.460092       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.478420       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.517527       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="207.199µs"
	I0408 19:52:48.565559       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="235.1µs"
	I0408 19:52:49.235869       1 node_lifecycle_controller.go:1057] "Controller detected that some Nodes are Ready. Exiting master disruption mode" logger="node-lifecycle-controller"
	I0408 19:52:50.794603       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:50.874586       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="56.401µs"
	I0408 19:52:50.925812       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="25.010294ms"
	I0408 19:52:50.926517       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="48.701µs"
	I0408 19:57:09.305652       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="89.317584ms"
	I0408 19:57:09.320397       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="14.071723ms"
	I0408 19:57:09.320633       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="146.702µs"
	I0408 19:57:09.990300       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="52.201µs"
	I0408 19:57:13.577429       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="8.735738ms"
	I0408 19:57:13.577819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="292.801µs"
	I0408 19:57:16.821912       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:57:27.107575       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:01:11.176721       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:06:17.042230       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 20:05:20 multinode-095200 kubelet[2271]: E0408 20:05:20.022192    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:06:20 multinode-095200 kubelet[2271]: E0408 20:06:20.021166    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:07:20 multinode-095200 kubelet[2271]: E0408 20:07:20.021744    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:08:20 multinode-095200 kubelet[2271]: E0408 20:08:20.021471    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:09:20 multinode-095200 kubelet[2271]: E0408 20:09:20.022901    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	
	
	==> storage-provisioner [9927dca24957] <==
	I0408 19:52:49.620555       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0408 19:52:49.635491       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0408 19:52:49.635566       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0408 19:52:49.674217       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0408 19:52:49.675661       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"7dd83298-d9cf-49b5-b6a4-4a4373a2ee21", APIVersion:"v1", ResourceVersion:"404", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099 became leader
	I0408 19:52:49.675918       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	I0408 19:52:49.776790       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (12.5315878s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: busybox-58667487b6-js7bg
helpers_test.go:274: ======> post-mortem[TestMultiNode/serial/DeployApp2Nodes]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context multinode-095200 describe pod busybox-58667487b6-js7bg
helpers_test.go:282: (dbg) kubectl --context multinode-095200 describe pod busybox-58667487b6-js7bg:

                                                
                                                
-- stdout --
	Name:             busybox-58667487b6-js7bg
	Namespace:        default
	Priority:         0
	Service Account:  default
	Node:             <none>
	Labels:           app=busybox
	                  pod-template-hash=58667487b6
	Annotations:      <none>
	Status:           Pending
	IP:               
	IPs:              <none>
	Controlled By:    ReplicaSet/busybox-58667487b6
	Containers:
	  busybox:
	    Image:      gcr.io/k8s-minikube/busybox:1.28
	    Port:       <none>
	    Host Port:  <none>
	    Command:
	      sleep
	      3600
	    Environment:  <none>
	    Mounts:
	      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-zp8wl (ro)
	Conditions:
	  Type           Status
	  PodScheduled   False 
	Volumes:
	  kube-api-access-zp8wl:
	    Type:                    Projected (a volume that contains injected data from multiple sources)
	    TokenExpirationSeconds:  3607
	    ConfigMapName:           kube-root-ca.crt
	    ConfigMapOptional:       <nil>
	    DownwardAPI:             true
	QoS Class:                   BestEffort
	Node-Selectors:              <none>
	Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
	                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
	Events:
	  Type     Reason            Age                  From               Message
	  ----     ------            ----                 ----               -------
	  Warning  FailedScheduling  2m18s (x3 over 12m)  default-scheduler  0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 No preemption victims found for incoming pod.

                                                
                                                
-- /stdout --
helpers_test.go:285: <<< TestMultiNode/serial/DeployApp2Nodes FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/DeployApp2Nodes (750.63s)

                                                
                                    
x
+
TestMultiNode/serial/PingHostFrom2Pods (51.8s)

                                                
                                                
=== RUN   TestMultiNode/serial/PingHostFrom2Pods
multinode_test.go:564: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- get pods -o jsonpath='{.items[*].metadata.name}'
multinode_test.go:572: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:583: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- sh -c "ping -c 1 172.22.32.1"
multinode_test.go:583: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-jn4np -- sh -c "ping -c 1 172.22.32.1": exit status 1 (10.5306272s)

                                                
                                                
-- stdout --
	PING 172.22.32.1 (172.22.32.1): 56 data bytes
	
	--- 172.22.32.1 ping statistics ---
	1 packets transmitted, 0 packets received, 100% packet loss

                                                
                                                
-- /stdout --
** stderr ** 
	command terminated with exit code 1

                                                
                                                
** /stderr **
multinode_test.go:584: Failed to ping host (172.22.32.1) from pod (busybox-58667487b6-jn4np): exit status 1
multinode_test.go:572: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3"
multinode_test.go:572: (dbg) Non-zero exit: out/minikube-windows-amd64.exe kubectl -p multinode-095200 -- exec busybox-58667487b6-js7bg -- sh -c "nslookup host.minikube.internal | awk 'NR==5' | cut -d' ' -f3": exit status 1 (455.2009ms)

                                                
                                                
** stderr ** 
	Error from server (BadRequest): pod busybox-58667487b6-js7bg does not have a host assigned

                                                
                                                
** /stderr **
multinode_test.go:574: Pod busybox-58667487b6-js7bg could not resolve 'host.minikube.internal': exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (12.5662987s)
helpers_test.go:244: <<< TestMultiNode/serial/PingHostFrom2Pods FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/PingHostFrom2Pods]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (9.6694874s)
helpers_test.go:252: TestMultiNode/serial/PingHostFrom2Pods logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| Command |                       Args                        |     Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| start   | -p multinode-095200                               | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:49 UTC |                     |
	|         | --wait=true --memory=2200                         |                  |                   |         |                     |                     |
	|         | --nodes=2 -v=8                                    |                  |                   |         |                     |                     |
	|         | --alsologtostderr                                 |                  |                   |         |                     |                     |
	|         | --driver=hyperv                                   |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- apply -f                   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC | 08 Apr 25 19:57 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- rollout                    | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC |                     |
	|         | status deployment/busybox                         |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-jn4np -- sh                    |                  |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1                          |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310620301Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310792803Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310817803Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.311021905Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:11Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/24feeb6878af8f2e3d8a31bd0e2b179192ce02e8e20d04d4d523246b72a8ddfe/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 19:57:13 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:13Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268796369Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268860569Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268873669Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.269006570Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	81a5e3779adab       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   12 minutes ago      Running             busybox                   0                   24feeb6878af8       busybox-58667487b6-jn4np
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                                         17 minutes ago      Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                                         17 minutes ago      Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              17 minutes ago      Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                                         17 minutes ago      Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                                         18 minutes ago      Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                                         18 minutes ago      Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                                         18 minutes ago      Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                                         18 minutes ago      Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	[INFO] 10.244.0.3:48144 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000356702s
	[INFO] 10.244.0.3:34835 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 140 0.13279721s
	[INFO] 10.244.0.3:33803 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd 60 0.044468738s
	[INFO] 10.244.0.3:36247 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.087022914s
	[INFO] 10.244.0.3:44861 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000233501s
	[INFO] 10.244.0.3:35209 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.051064673s
	[INFO] 10.244.0.3:55261 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000271302s
	[INFO] 10.244.0.3:50891 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000226301s
	[INFO] 10.244.0.3:47466 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.029589859s
	[INFO] 10.244.0.3:56852 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000128201s
	[INFO] 10.244.0.3:53365 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000172101s
	[INFO] 10.244.0.3:58492 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000296702s
	[INFO] 10.244.0.3:41257 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000186601s
	[INFO] 10.244.0.3:33967 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000235602s
	[INFO] 10.244.0.3:34652 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000120301s
	[INFO] 10.244.0.3:42747 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000392603s
	[INFO] 10.244.0.3:34722 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000319201s
	[INFO] 10.244.0.3:39354 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000278601s
	[INFO] 10.244.0.3:58350 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.0001081s
	[INFO] 10.244.0.3:46798 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.000209101s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:10:12 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:06:17 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-jn4np                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         13m
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     17m
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         17m
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      17m
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         17m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 17m   kube-proxy       
	  Normal  Starting                 17m   kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  17m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  17m   kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    17m   kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     17m   kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           17m   node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                17m   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	==> dmesg <==
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	[Apr 8 19:57] kauditd_printk_skb: 14 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T19:52:13.626757Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgPreVoteResp from fd713bd18919f474 at term 1"}
	{"level":"info","ts":"2025-04-08T19:52:13.626926Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became candidate at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627134Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 received MsgVoteResp from fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627369Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"fd713bd18919f474 became leader at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.627508Z","logger":"raft","caller":"etcdserver/zap_raft.go:77","msg":"raft.node: fd713bd18919f474 elected leader fd713bd18919f474 at term 2"}
	{"level":"info","ts":"2025-04-08T19:52:13.633658Z","caller":"etcdserver/server.go:2140","msg":"published local member to cluster through raft","local-member-id":"fd713bd18919f474","local-member-attributes":"{Name:multinode-095200 ClientURLs:[https://172.22.37.202:2379]}","request-path":"/0/members/fd713bd18919f474/attributes","cluster-id":"720e62ee34e6c5c","publish-timeout":"7s"}
	{"level":"info","ts":"2025-04-08T19:52:13.633743Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.633703Z","caller":"etcdserver/server.go:2651","msg":"setting up initial cluster version using v2 API","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.637216Z","caller":"embed/serve.go:103","msg":"ready to serve client requests"}
	{"level":"info","ts":"2025-04-08T19:52:13.643734Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.645529Z","caller":"v3rpc/health.go:61","msg":"grpc service status changed","service":"","status":"SERVING"}
	{"level":"info","ts":"2025-04-08T19:52:13.648744Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"172.22.37.202:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.652143Z","caller":"etcdmain/main.go:44","msg":"notifying init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.652359Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.653175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.653497Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"720e62ee34e6c5c","local-member-id":"fd713bd18919f474","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653612Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653984Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:53:04.028898Z","caller":"traceutil/trace.go:171","msg":"trace[1500643244] transaction","detail":"{read_only:false; response_revision:427; number_of_response:1; }","duration":"208.713694ms","start":"2025-04-08T19:53:03.820165Z","end":"2025-04-08T19:53:04.028879Z","steps":["trace[1500643244] 'process raft request'  (duration: 208.58869ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:02:13.994704Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":650}
	{"level":"info","ts":"2025-04-08T20:02:14.012096Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":650,"took":"16.853983ms","hash":4142537579,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":2072576,"current-db-size-in-use":"2.1 MB"}
	{"level":"info","ts":"2025-04-08T20:02:14.012321Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4142537579,"revision":650,"compact-revision":-1}
	{"level":"info","ts":"2025-04-08T20:07:14.017211Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":892}
	{"level":"info","ts":"2025-04-08T20:07:14.030836Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":892,"took":"13.37027ms","hash":3377129079,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1581056,"current-db-size-in-use":"1.6 MB"}
	{"level":"info","ts":"2025-04-08T20:07:14.030906Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3377129079,"revision":892,"compact-revision":650}
	
	
	==> kernel <==
	 20:10:13 up 20 min,  0 users,  load average: 0.34, 0.95, 0.65
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 20:08:07.230598       1 main.go:301] handling current node
	I0408 20:08:17.236717       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:17.236980       1 main.go:301] handling current node
	I0408 20:08:27.226938       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:27.227173       1 main.go:301] handling current node
	I0408 20:08:37.227989       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:37.228026       1 main.go:301] handling current node
	I0408 20:08:47.231207       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:47.231337       1 main.go:301] handling current node
	I0408 20:08:57.227527       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:08:57.227912       1 main.go:301] handling current node
	I0408 20:09:07.226885       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:07.226941       1 main.go:301] handling current node
	I0408 20:09:17.231119       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:17.231283       1 main.go:301] handling current node
	I0408 20:09:27.226810       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:27.227014       1 main.go:301] handling current node
	I0408 20:09:37.226841       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:37.226957       1 main.go:301] handling current node
	I0408 20:09:47.236666       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:47.236765       1 main.go:301] handling current node
	I0408 20:09:57.226613       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:09:57.226774       1 main.go:301] handling current node
	I0408 20:10:07.229508       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:10:07.229639       1 main.go:301] handling current node
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.798487       1 aggregator.go:171] initial CRD sync complete...
	I0408 19:52:16.798547       1 autoregister_controller.go:144] Starting autoregister controller
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0408 20:08:59.828087       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59231: use of closed network connection
	E0408 20:09:00.905990       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59236: use of closed network connection
	E0408 20:09:01.943484       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59241: use of closed network connection
	E0408 20:09:40.196961       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59261: use of closed network connection
	E0408 20:09:50.729406       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59263: use of closed network connection
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 19:52:25.943649       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="83.515777ms"
	I0408 19:52:25.945647       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="141.399µs"
	I0408 19:52:26.006473       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="91.099µs"
	I0408 19:52:26.318629       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="54.168523ms"
	I0408 19:52:26.348330       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="29.55473ms"
	I0408 19:52:26.348824       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="41.1µs"
	I0408 19:52:48.460092       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.478420       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:48.517527       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="207.199µs"
	I0408 19:52:48.565559       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="235.1µs"
	I0408 19:52:49.235869       1 node_lifecycle_controller.go:1057] "Controller detected that some Nodes are Ready. Exiting master disruption mode" logger="node-lifecycle-controller"
	I0408 19:52:50.794603       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:52:50.874586       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="56.401µs"
	I0408 19:52:50.925812       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="25.010294ms"
	I0408 19:52:50.926517       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="kube-system/coredns-668d6bf9bc" duration="48.701µs"
	I0408 19:57:09.305652       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="89.317584ms"
	I0408 19:57:09.320397       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="14.071723ms"
	I0408 19:57:09.320633       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="146.702µs"
	I0408 19:57:09.990300       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="52.201µs"
	I0408 19:57:13.577429       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="8.735738ms"
	I0408 19:57:13.577819       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="292.801µs"
	I0408 19:57:16.821912       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:57:27.107575       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:01:11.176721       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:06:17.042230       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 20:05:20 multinode-095200 kubelet[2271]: E0408 20:05:20.022192    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:05:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:06:20 multinode-095200 kubelet[2271]: E0408 20:06:20.021166    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:06:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:07:20 multinode-095200 kubelet[2271]: E0408 20:07:20.021744    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:07:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:08:20 multinode-095200 kubelet[2271]: E0408 20:08:20.021471    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:08:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:09:20 multinode-095200 kubelet[2271]: E0408 20:09:20.022901    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:09:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	
	
	==> storage-provisioner [9927dca24957] <==
	I0408 19:52:49.620555       1 storage_provisioner.go:116] Initializing the minikube storage provisioner...
	I0408 19:52:49.635491       1 storage_provisioner.go:141] Storage provisioner initialized, now starting service!
	I0408 19:52:49.635566       1 leaderelection.go:243] attempting to acquire leader lease kube-system/k8s.io-minikube-hostpath...
	I0408 19:52:49.674217       1 leaderelection.go:253] successfully acquired lease kube-system/k8s.io-minikube-hostpath
	I0408 19:52:49.675661       1 event.go:282] Event(v1.ObjectReference{Kind:"Endpoints", Namespace:"kube-system", Name:"k8s.io-minikube-hostpath", UID:"7dd83298-d9cf-49b5-b6a4-4a4373a2ee21", APIVersion:"v1", ResourceVersion:"404", FieldPath:""}): type: 'Normal' reason: 'LeaderElection' multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099 became leader
	I0408 19:52:49.675918       1 controller.go:835] Starting provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	I0408 19:52:49.776790       1 controller.go:884] Started provisioner controller k8s.io/minikube-hostpath_multinode-095200_1b556566-aa1d-41e7-8493-4f122a977099!
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (14.9556364s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:272: non-running pods: busybox-58667487b6-js7bg
helpers_test.go:274: ======> post-mortem[TestMultiNode/serial/PingHostFrom2Pods]: describe non-running pods <======
helpers_test.go:277: (dbg) Run:  kubectl --context multinode-095200 describe pod busybox-58667487b6-js7bg
helpers_test.go:282: (dbg) kubectl --context multinode-095200 describe pod busybox-58667487b6-js7bg:

                                                
                                                
-- stdout --
	Name:             busybox-58667487b6-js7bg
	Namespace:        default
	Priority:         0
	Service Account:  default
	Node:             <none>
	Labels:           app=busybox
	                  pod-template-hash=58667487b6
	Annotations:      <none>
	Status:           Pending
	IP:               
	IPs:              <none>
	Controlled By:    ReplicaSet/busybox-58667487b6
	Containers:
	  busybox:
	    Image:      gcr.io/k8s-minikube/busybox:1.28
	    Port:       <none>
	    Host Port:  <none>
	    Command:
	      sleep
	      3600
	    Environment:  <none>
	    Mounts:
	      /var/run/secrets/kubernetes.io/serviceaccount from kube-api-access-zp8wl (ro)
	Conditions:
	  Type           Status
	  PodScheduled   False 
	Volumes:
	  kube-api-access-zp8wl:
	    Type:                    Projected (a volume that contains injected data from multiple sources)
	    TokenExpirationSeconds:  3607
	    ConfigMapName:           kube-root-ca.crt
	    ConfigMapOptional:       <nil>
	    DownwardAPI:             true
	QoS Class:                   BestEffort
	Node-Selectors:              <none>
	Tolerations:                 node.kubernetes.io/not-ready:NoExecute op=Exists for 300s
	                             node.kubernetes.io/unreachable:NoExecute op=Exists for 300s
	Events:
	  Type     Reason            Age                  From               Message
	  ----     ------            ----                 ----               -------
	  Warning  FailedScheduling  3m10s (x3 over 13m)  default-scheduler  0/1 nodes are available: 1 node(s) didn't match pod anti-affinity rules. preemption: 0/1 nodes are available: 1 No preemption victims found for incoming pod.

                                                
                                                
-- /stdout --
helpers_test.go:285: <<< TestMultiNode/serial/PingHostFrom2Pods FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/PingHostFrom2Pods (51.80s)

                                                
                                    
x
+
TestMultiNode/serial/AddNode (282.09s)

                                                
                                                
=== RUN   TestMultiNode/serial/AddNode
multinode_test.go:121: (dbg) Run:  out/minikube-windows-amd64.exe node add -p multinode-095200 -v 3 --alsologtostderr
E0408 20:11:05.475334    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:11:52.272597    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
multinode_test.go:121: (dbg) Done: out/minikube-windows-amd64.exe node add -p multinode-095200 -v 3 --alsologtostderr: (3m30.0865483s)
multinode_test.go:127: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr
E0408 20:14:08.563523    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
multinode_test.go:127: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr: exit status 2 (36.4657839s)

                                                
                                                
-- stdout --
	multinode-095200
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-095200-m02
	type: Worker
	host: Running
	kubelet: Stopped
	
	multinode-095200-m03
	type: Worker
	host: Running
	kubelet: Running
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 20:14:01.321164    4828 out.go:345] Setting OutFile to fd 1844 ...
	I0408 20:14:01.401911    4828 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:14:01.401911    4828 out.go:358] Setting ErrFile to fd 1740...
	I0408 20:14:01.401911    4828 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:14:01.418704    4828 out.go:352] Setting JSON to false
	I0408 20:14:01.418787    4828 mustload.go:65] Loading cluster: multinode-095200
	I0408 20:14:01.418900    4828 notify.go:220] Checking for updates...
	I0408 20:14:01.419523    4828 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 20:14:01.419523    4828 status.go:174] checking status of multinode-095200 ...
	I0408 20:14:01.420801    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:14:03.627165    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:03.627165    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:03.627165    4828 status.go:371] multinode-095200 host status = "Running" (err=<nil>)
	I0408 20:14:03.627165    4828 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:14:03.627811    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:14:05.849091    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:05.849091    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:05.849091    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:08.445656    4828 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:14:08.445931    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:08.445931    4828 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:14:08.458708    4828 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:14:08.458708    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:14:10.639954    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:10.639954    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:10.640145    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:13.283974    4828 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:14:13.283974    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:13.285187    4828 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 20:14:13.382625    4828 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9237996s)
	I0408 20:14:13.395731    4828 ssh_runner.go:195] Run: systemctl --version
	I0408 20:14:13.419867    4828 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:14:13.446887    4828 kubeconfig.go:125] found "multinode-095200" server: "https://172.22.37.202:8443"
	I0408 20:14:13.447034    4828 api_server.go:166] Checking apiserver status ...
	I0408 20:14:13.460630    4828 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 20:14:13.500788    4828 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup
	W0408 20:14:13.521189    4828 api_server.go:177] unable to find freezer cgroup: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup: Process exited with status 1
	stdout:
	
	stderr:
	I0408 20:14:13.533183    4828 ssh_runner.go:195] Run: ls
	I0408 20:14:13.540202    4828 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 20:14:13.550113    4828 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 20:14:13.550113    4828 status.go:463] multinode-095200 apiserver status = Running (err=<nil>)
	I0408 20:14:13.550113    4828 status.go:176] multinode-095200 status: &{Name:multinode-095200 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:14:13.550113    4828 status.go:174] checking status of multinode-095200-m02 ...
	I0408 20:14:13.550740    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:14:15.732471    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:15.732471    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:15.732471    4828 status.go:371] multinode-095200-m02 host status = "Running" (err=<nil>)
	I0408 20:14:15.733356    4828 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:14:15.734343    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:14:17.977375    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:17.977375    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:17.978588    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:20.572837    4828 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:14:20.572837    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:20.572988    4828 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:14:20.587857    4828 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:14:20.587857    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:14:22.735859    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:22.736003    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:22.736003    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:25.332850    4828 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:14:25.333106    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:25.333371    4828 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 20:14:25.427017    4828 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.839121s)
	I0408 20:14:25.438482    4828 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:14:25.461710    4828 status.go:176] multinode-095200-m02 status: &{Name:multinode-095200-m02 Host:Running Kubelet:Stopped APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:14:25.461710    4828 status.go:174] checking status of multinode-095200-m03 ...
	I0408 20:14:25.462622    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:14:27.691744    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:27.692126    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:27.692126    4828 status.go:371] multinode-095200-m03 host status = "Running" (err=<nil>)
	I0408 20:14:27.692126    4828 host.go:66] Checking if "multinode-095200-m03" exists ...
	I0408 20:14:27.693083    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:14:29.947910    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:29.947910    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:29.948964    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:32.604420    4828 main.go:141] libmachine: [stdout =====>] : 172.22.39.251
	
	I0408 20:14:32.604716    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:32.604910    4828 host.go:66] Checking if "multinode-095200-m03" exists ...
	I0408 20:14:32.617084    4828 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:14:32.617084    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:14:34.830353    4828 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:14:34.830353    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:34.831268    4828 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 20:14:37.499098    4828 main.go:141] libmachine: [stdout =====>] : 172.22.39.251
	
	I0408 20:14:37.499616    4828 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:14:37.499688    4828 sshutil.go:53] new ssh client: &{IP:172.22.39.251 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m03\id_rsa Username:docker}
	I0408 20:14:37.596876    4828 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9797528s)
	I0408 20:14:37.610231    4828 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:14:37.636236    4828 status.go:176] multinode-095200-m03 status: &{Name:multinode-095200-m03 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
multinode_test.go:129: failed to run minikube status. args "out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr" : exit status 2
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (12.4762081s)
helpers_test.go:244: <<< TestMultiNode/serial/AddNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/AddNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (8.676368s)
helpers_test.go:252: TestMultiNode/serial/AddNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| Command |                       Args                        |     Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| kubectl | -p multinode-095200 -- apply -f                   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC | 08 Apr 25 19:57 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- rollout                    | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC |                     |
	|         | status deployment/busybox                         |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-jn4np -- sh                    |                  |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1                          |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	| node    | add -p multinode-095200 -v 3                      | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:10 UTC | 08 Apr 25 20:14 UTC |
	|         | --alsologtostderr                                 |                  |                   |         |                     |                     |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310620301Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310792803Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310817803Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.311021905Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:11Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/24feeb6878af8f2e3d8a31bd0e2b179192ce02e8e20d04d4d523246b72a8ddfe/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 19:57:13 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:13Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268796369Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268860569Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268873669Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.269006570Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	81a5e3779adab       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   17 minutes ago      Running             busybox                   0                   24feeb6878af8       busybox-58667487b6-jn4np
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                                         22 minutes ago      Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                                         22 minutes ago      Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              22 minutes ago      Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                                         22 minutes ago      Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                                         22 minutes ago      Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                                         22 minutes ago      Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                                         22 minutes ago      Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                                         22 minutes ago      Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	[INFO] 10.244.0.3:48144 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000356702s
	[INFO] 10.244.0.3:34835 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 140 0.13279721s
	[INFO] 10.244.0.3:33803 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd 60 0.044468738s
	[INFO] 10.244.0.3:36247 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.087022914s
	[INFO] 10.244.0.3:44861 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000233501s
	[INFO] 10.244.0.3:35209 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.051064673s
	[INFO] 10.244.0.3:55261 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000271302s
	[INFO] 10.244.0.3:50891 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000226301s
	[INFO] 10.244.0.3:47466 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.029589859s
	[INFO] 10.244.0.3:56852 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000128201s
	[INFO] 10.244.0.3:53365 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000172101s
	[INFO] 10.244.0.3:58492 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000296702s
	[INFO] 10.244.0.3:41257 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000186601s
	[INFO] 10.244.0.3:33967 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000235602s
	[INFO] 10.244.0.3:34652 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000120301s
	[INFO] 10.244.0.3:42747 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000392603s
	[INFO] 10.244.0.3:34722 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000319201s
	[INFO] 10.244.0.3:39354 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000278601s
	[INFO] 10.244.0.3:58350 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.0001081s
	[INFO] 10.244.0.3:46798 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.000209101s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:14:48 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:11:22 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:11:22 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:11:22 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:11:22 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-jn4np                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     22m
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         22m
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      22m
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         22m
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         22m
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         22m
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         22m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         22m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 22m   kube-proxy       
	  Normal  Starting                 22m   kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  22m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  22m   kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    22m   kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     22m   kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           22m   node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                22m   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	Name:               multinode-095200-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T20_13_27_0700
	                    minikube.k8s.io/version=v1.35.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 20:13:26 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200-m03
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:14:49 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:59 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.39.251
	  Hostname:    multinode-095200-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 5dd08273ae3d422da88197050793897d
	  System UUID:                eb5f1aa3-1dba-eb43-ad9d-e1ed36b898a1
	  Boot ID:                    d4cb601e-8968-4e50-b6e7-defe0d03ea49
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-js7bg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         17m
	  kube-system                 kindnet-4gvks               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      92s
	  kube-system                 kube-proxy-fcgz7            0 (0%)        0 (0%)      0 (0%)           0 (0%)         92s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (2%)  50Mi (2%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                From             Message
	  ----    ------                   ----               ----             -------
	  Normal  Starting                 79s                kube-proxy       
	  Normal  NodeHasSufficientMemory  92s (x2 over 92s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    92s (x2 over 92s)  kubelet          Node multinode-095200-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     92s (x2 over 92s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  92s                kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           89s                node-controller  Node multinode-095200-m03 event: Registered Node multinode-095200-m03 in Controller
	  Normal  NodeReady                59s                kubelet          Node multinode-095200-m03 status is now: NodeReady
	
	
	==> dmesg <==
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	[Apr 8 19:57] kauditd_printk_skb: 14 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T19:52:13.652359Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.653175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.653497Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"720e62ee34e6c5c","local-member-id":"fd713bd18919f474","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653612Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653984Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:53:04.028898Z","caller":"traceutil/trace.go:171","msg":"trace[1500643244] transaction","detail":"{read_only:false; response_revision:427; number_of_response:1; }","duration":"208.713694ms","start":"2025-04-08T19:53:03.820165Z","end":"2025-04-08T19:53:04.028879Z","steps":["trace[1500643244] 'process raft request'  (duration: 208.58869ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:02:13.994704Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":650}
	{"level":"info","ts":"2025-04-08T20:02:14.012096Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":650,"took":"16.853983ms","hash":4142537579,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":2072576,"current-db-size-in-use":"2.1 MB"}
	{"level":"info","ts":"2025-04-08T20:02:14.012321Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4142537579,"revision":650,"compact-revision":-1}
	{"level":"info","ts":"2025-04-08T20:07:14.017211Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":892}
	{"level":"info","ts":"2025-04-08T20:07:14.030836Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":892,"took":"13.37027ms","hash":3377129079,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1581056,"current-db-size-in-use":"1.6 MB"}
	{"level":"info","ts":"2025-04-08T20:07:14.030906Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3377129079,"revision":892,"compact-revision":650}
	{"level":"info","ts":"2025-04-08T20:10:48.783716Z","caller":"traceutil/trace.go:171","msg":"trace[1418995672] transaction","detail":"{read_only:false; response_revision:1304; number_of_response:1; }","duration":"172.026431ms","start":"2025-04-08T20:10:48.611671Z","end":"2025-04-08T20:10:48.783698Z","steps":["trace[1418995672] 'process raft request'  (duration: 171.88573ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:10:51.233244Z","caller":"traceutil/trace.go:171","msg":"trace[1434363476] transaction","detail":"{read_only:false; response_revision:1307; number_of_response:1; }","duration":"154.120134ms","start":"2025-04-08T20:10:51.079104Z","end":"2025-04-08T20:10:51.233224Z","steps":["trace[1434363476] 'process raft request'  (duration: 153.912433ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:11:50.092603Z","caller":"traceutil/trace.go:171","msg":"trace[1794927297] transaction","detail":"{read_only:false; response_revision:1355; number_of_response:1; }","duration":"207.047428ms","start":"2025-04-08T20:11:49.885538Z","end":"2025-04-08T20:11:50.092585Z","steps":["trace[1794927297] 'process raft request'  (duration: 206.952227ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:12:14.034956Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1132}
	{"level":"info","ts":"2025-04-08T20:12:14.043791Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":1132,"took":"8.264645ms","hash":3480195930,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1519616,"current-db-size-in-use":"1.5 MB"}
	{"level":"info","ts":"2025-04-08T20:12:14.043914Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3480195930,"revision":1132,"compact-revision":892}
	{"level":"info","ts":"2025-04-08T20:13:19.538892Z","caller":"traceutil/trace.go:171","msg":"trace[539230204] transaction","detail":"{read_only:false; response_revision:1427; number_of_response:1; }","duration":"196.84768ms","start":"2025-04-08T20:13:19.342005Z","end":"2025-04-08T20:13:19.538853Z","steps":["trace[539230204] 'process raft request'  (duration: 140.495971ms)","trace[539230204] 'compare'  (duration: 56.215708ms)"],"step_count":2}
	{"level":"info","ts":"2025-04-08T20:13:37.385572Z","caller":"traceutil/trace.go:171","msg":"trace[614883105] transaction","detail":"{read_only:false; response_revision:1480; number_of_response:1; }","duration":"116.69824ms","start":"2025-04-08T20:13:37.268859Z","end":"2025-04-08T20:13:37.385557Z","steps":["trace[614883105] 'process raft request'  (duration: 116.60574ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:13:37.497911Z","caller":"traceutil/trace.go:171","msg":"trace[1757030073] transaction","detail":"{read_only:false; response_revision:1481; number_of_response:1; }","duration":"112.257716ms","start":"2025-04-08T20:13:37.385615Z","end":"2025-04-08T20:13:37.497873Z","steps":["trace[1757030073] 'process raft request'  (duration: 44.166642ms)","trace[1757030073] 'compare'  (duration: 67.877973ms)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"283.043654ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/jobs/\" range_end:\"/registry/jobs0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132713Z","caller":"traceutil/trace.go:171","msg":"trace[488800171] range","detail":"{range_begin:/registry/jobs/; range_end:/registry/jobs0; response_count:0; response_revision:1492; }","duration":"283.247756ms","start":"2025-04-08T20:13:42.849451Z","end":"2025-04-08T20:13:43.132699Z","steps":["trace[488800171] 'count revisions from in-memory index tree'  (duration: 282.917554ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"126.454095ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/persistentvolumeclaims/\" range_end:\"/registry/persistentvolumeclaims0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132897Z","caller":"traceutil/trace.go:171","msg":"trace[1026523730] range","detail":"{range_begin:/registry/persistentvolumeclaims/; range_end:/registry/persistentvolumeclaims0; response_count:0; response_revision:1492; }","duration":"126.801497ms","start":"2025-04-08T20:13:43.006085Z","end":"2025-04-08T20:13:43.132887Z","steps":["trace[1026523730] 'count revisions from in-memory index tree'  (duration: 126.320694ms)"],"step_count":1}
	
	
	==> kernel <==
	 20:14:58 up 24 min,  0 users,  load average: 0.31, 0.67, 0.62
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 20:13:57.230443       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:07.226753       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:07.226868       1 main.go:301] handling current node
	I0408 20:14:07.226889       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:07.226897       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:17.227112       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:17.227234       1 main.go:301] handling current node
	I0408 20:14:17.227255       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:17.227262       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:27.234391       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:27.234621       1 main.go:301] handling current node
	I0408 20:14:27.234643       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:27.234651       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:37.227668       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:37.227809       1 main.go:301] handling current node
	I0408 20:14:37.228236       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:37.228258       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:47.235419       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:47.236282       1 main.go:301] handling current node
	I0408 20:14:47.237390       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:47.237667       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:14:57.226963       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:14:57.227103       1 main.go:301] handling current node
	I0408 20:14:57.227343       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:14:57.227520       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0408 20:08:59.828087       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59231: use of closed network connection
	E0408 20:09:00.905990       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59236: use of closed network connection
	E0408 20:09:01.943484       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59241: use of closed network connection
	E0408 20:09:40.196961       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59261: use of closed network connection
	E0408 20:09:50.729406       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59263: use of closed network connection
	E0408 20:13:27.732740       1 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0" auditID="25aea89a-4448-4253-aeac-bde38ac93ff6"
	E0408 20:13:27.732687       1 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="9.4µs" method="GET" path="/api/v1/services" result=null
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 19:57:16.821912       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 19:57:27.107575       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:01:11.176721       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:06:17.042230       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:11:22.697513       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:13:26.955211       1 actual_state_of_world.go:541] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-095200-m03\" does not exist"
	I0408 20:13:26.989485       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="41.6µs"
	I0408 20:13:27.001442       1 range_allocator.go:428] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-095200-m03" podCIDRs=["10.244.1.0/24"]
	I0408 20:13:27.001547       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.001576       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.359144       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.950840       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:29.495284       1 node_lifecycle_controller.go:886] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-095200-m03"
	I0408 20:13:29.512314       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:37.387903       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:57.842537       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.730120       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-095200-m03"
	I0408 20:13:59.731192       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.755844       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.769950       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="121.201µs"
	I0408 20:13:59.789908       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="69.4µs"
	I0408 20:14:02.549158       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="14.149878ms"
	I0408 20:14:02.549975       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="90.9µs"
	I0408 20:14:04.522546       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:14:28.386936       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 20:10:20 multinode-095200 kubelet[2271]: E0408 20:10:20.024993    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:10:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:10:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:10:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:10:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:11:20 multinode-095200 kubelet[2271]: E0408 20:11:20.022679    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:11:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:11:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:11:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:11:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:12:20 multinode-095200 kubelet[2271]: E0408 20:12:20.021916    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:13:20 multinode-095200 kubelet[2271]: E0408 20:13:20.021956    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:14:20 multinode-095200 kubelet[2271]: E0408 20:14:20.020995    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (12.2772349s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiNode/serial/AddNode FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/AddNode (282.09s)

                                                
                                    
x
+
TestMultiNode/serial/CopyFile (72.94s)

                                                
                                                
=== RUN   TestMultiNode/serial/CopyFile
multinode_test.go:184: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 status --output json --alsologtostderr
E0408 20:16:05.478471    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
multinode_test.go:184: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p multinode-095200 status --output json --alsologtostderr: exit status 2 (36.8917589s)

                                                
                                                
-- stdout --
	[{"Name":"multinode-095200","Host":"Running","Kubelet":"Running","APIServer":"Running","Kubeconfig":"Configured","Worker":false},{"Name":"multinode-095200-m02","Host":"Running","Kubelet":"Stopped","APIServer":"Irrelevant","Kubeconfig":"Irrelevant","Worker":true},{"Name":"multinode-095200-m03","Host":"Running","Kubelet":"Running","APIServer":"Irrelevant","Kubeconfig":"Irrelevant","Worker":true}]

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 20:15:50.043471    6208 out.go:345] Setting OutFile to fd 1728 ...
	I0408 20:15:50.139816    6208 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:15:50.139816    6208 out.go:358] Setting ErrFile to fd 1480...
	I0408 20:15:50.139816    6208 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:15:50.156773    6208 out.go:352] Setting JSON to true
	I0408 20:15:50.156773    6208 mustload.go:65] Loading cluster: multinode-095200
	I0408 20:15:50.156773    6208 notify.go:220] Checking for updates...
	I0408 20:15:50.158383    6208 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 20:15:50.158443    6208 status.go:174] checking status of multinode-095200 ...
	I0408 20:15:50.158758    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:15:52.420552    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:15:52.420552    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:15:52.420552    6208 status.go:371] multinode-095200 host status = "Running" (err=<nil>)
	I0408 20:15:52.420552    6208 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:15:52.423625    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:15:54.660722    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:15:54.660722    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:15:54.660722    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:15:57.302121    6208 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:15:57.302121    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:15:57.302121    6208 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:15:57.314012    6208 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:15:57.314012    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:15:59.526008    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:15:59.526212    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:15:59.526212    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:16:02.152019    6208 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:16:02.152549    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:02.152549    6208 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 20:16:02.266732    6208 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9526806s)
	I0408 20:16:02.279033    6208 ssh_runner.go:195] Run: systemctl --version
	I0408 20:16:02.301323    6208 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:16:02.327010    6208 kubeconfig.go:125] found "multinode-095200" server: "https://172.22.37.202:8443"
	I0408 20:16:02.327607    6208 api_server.go:166] Checking apiserver status ...
	I0408 20:16:02.339290    6208 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 20:16:02.376890    6208 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup
	W0408 20:16:02.393724    6208 api_server.go:177] unable to find freezer cgroup: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup: Process exited with status 1
	stdout:
	
	stderr:
	I0408 20:16:02.406029    6208 ssh_runner.go:195] Run: ls
	I0408 20:16:02.413657    6208 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 20:16:02.422958    6208 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 20:16:02.422958    6208 status.go:463] multinode-095200 apiserver status = Running (err=<nil>)
	I0408 20:16:02.422958    6208 status.go:176] multinode-095200 status: &{Name:multinode-095200 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:16:02.423936    6208 status.go:174] checking status of multinode-095200-m02 ...
	I0408 20:16:02.424584    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:16:04.597827    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:04.598063    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:04.598169    6208 status.go:371] multinode-095200-m02 host status = "Running" (err=<nil>)
	I0408 20:16:04.598169    6208 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:16:04.599304    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:16:06.765318    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:06.765318    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:06.765442    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:16:09.389524    6208 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:16:09.389524    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:09.390608    6208 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:16:09.406187    6208 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:16:09.406187    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:16:11.631188    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:11.631351    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:11.631434    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:16:14.268135    6208 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:16:14.268135    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:14.268511    6208 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 20:16:14.368615    6208 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9623875s)
	I0408 20:16:14.381238    6208 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:16:14.407761    6208 status.go:176] multinode-095200-m02 status: &{Name:multinode-095200-m02 Host:Running Kubelet:Stopped APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:16:14.407864    6208 status.go:174] checking status of multinode-095200-m03 ...
	I0408 20:16:14.408677    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:16:16.646920    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:16.647007    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:16.647066    6208 status.go:371] multinode-095200-m03 host status = "Running" (err=<nil>)
	I0408 20:16:16.647066    6208 host.go:66] Checking if "multinode-095200-m03" exists ...
	I0408 20:16:16.647842    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:16:18.992673    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:18.992673    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:18.992906    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 20:16:21.727069    6208 main.go:141] libmachine: [stdout =====>] : 172.22.39.251
	
	I0408 20:16:21.727069    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:21.727069    6208 host.go:66] Checking if "multinode-095200-m03" exists ...
	I0408 20:16:21.741837    6208 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:16:21.741837    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:16:24.007138    6208 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:16:24.007867    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:24.007934    6208 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m03 ).networkadapters[0]).ipaddresses[0]
	I0408 20:16:26.647592    6208 main.go:141] libmachine: [stdout =====>] : 172.22.39.251
	
	I0408 20:16:26.647592    6208 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:16:26.648315    6208 sshutil.go:53] new ssh client: &{IP:172.22.39.251 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m03\id_rsa Username:docker}
	I0408 20:16:26.747467    6208 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (5.0055905s)
	I0408 20:16:26.759242    6208 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:16:26.785694    6208 status.go:176] multinode-095200-m03 status: &{Name:multinode-095200-m03 Host:Running Kubelet:Running APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
multinode_test.go:186: failed to run minikube status. args "out/minikube-windows-amd64.exe -p multinode-095200 status --output json --alsologtostderr" : exit status 2
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (12.4420759s)
helpers_test.go:244: <<< TestMultiNode/serial/CopyFile FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/CopyFile]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (8.786245s)
helpers_test.go:252: TestMultiNode/serial/CopyFile logs: 
-- stdout --
	
	==> Audit <==
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| Command |                       Args                        |     Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| kubectl | -p multinode-095200 -- apply -f                   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC | 08 Apr 25 19:57 UTC |
	|         | ./testdata/multinodes/multinode-pod-dns-test.yaml |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- rollout                    | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC |                     |
	|         | status deployment/busybox                         |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io                            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --                       |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default                       |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg -- nslookup              |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o                | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | jsonpath='{.items[*].metadata.name}'              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-jn4np -- sh                    |                  |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1                          |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec                       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg                          |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                                 |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk                      |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3                           |                  |                   |         |                     |                     |
	| node    | add -p multinode-095200 -v 3                      | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:10 UTC | 08 Apr 25 20:14 UTC |
	|         | --alsologtostderr                                 |                  |                   |         |                     |                     |
	|---------|---------------------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310620301Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310792803Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310817803Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.311021905Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:11Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/24feeb6878af8f2e3d8a31bd0e2b179192ce02e8e20d04d4d523246b72a8ddfe/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 19:57:13 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:13Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268796369Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268860569Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268873669Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.269006570Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	81a5e3779adab       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   19 minutes ago      Running             busybox                   0                   24feeb6878af8       busybox-58667487b6-jn4np
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                                         23 minutes ago      Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                                         23 minutes ago      Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              24 minutes ago      Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                                         24 minutes ago      Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                                         24 minutes ago      Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                                         24 minutes ago      Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                                         24 minutes ago      Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                                         24 minutes ago      Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	[INFO] 10.244.0.3:48144 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000356702s
	[INFO] 10.244.0.3:34835 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 140 0.13279721s
	[INFO] 10.244.0.3:33803 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd 60 0.044468738s
	[INFO] 10.244.0.3:36247 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.087022914s
	[INFO] 10.244.0.3:44861 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000233501s
	[INFO] 10.244.0.3:35209 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.051064673s
	[INFO] 10.244.0.3:55261 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000271302s
	[INFO] 10.244.0.3:50891 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000226301s
	[INFO] 10.244.0.3:47466 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.029589859s
	[INFO] 10.244.0.3:56852 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000128201s
	[INFO] 10.244.0.3:53365 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000172101s
	[INFO] 10.244.0.3:58492 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000296702s
	[INFO] 10.244.0.3:41257 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000186601s
	[INFO] 10.244.0.3:33967 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000235602s
	[INFO] 10.244.0.3:34652 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000120301s
	[INFO] 10.244.0.3:42747 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000392603s
	[INFO] 10.244.0.3:34722 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000319201s
	[INFO] 10.244.0.3:39354 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000278601s
	[INFO] 10.244.0.3:58350 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.0001081s
	[INFO] 10.244.0.3:46798 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.000209101s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:16:40 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-jn4np                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     24m
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         24m
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      24m
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         24m
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         24m
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         24m
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         24m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         24m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 24m   kube-proxy       
	  Normal  Starting                 24m   kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  24m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  24m   kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    24m   kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     24m   kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           24m   node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                23m   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	Name:               multinode-095200-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T20_13_27_0700
	                    minikube.k8s.io/version=v1.35.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 20:13:26 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200-m03
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:16:41 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:26 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:13:59 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.39.251
	  Hostname:    multinode-095200-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 5dd08273ae3d422da88197050793897d
	  System UUID:                eb5f1aa3-1dba-eb43-ad9d-e1ed36b898a1
	  Boot ID:                    d4cb601e-8968-4e50-b6e7-defe0d03ea49
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-js7bg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         19m
	  kube-system                 kindnet-4gvks               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      3m21s
	  kube-system                 kube-proxy-fcgz7            0 (0%)        0 (0%)      0 (0%)           0 (0%)         3m21s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (2%)  50Mi (2%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 3m8s                   kube-proxy       
	  Normal  NodeHasSufficientMemory  3m21s (x2 over 3m21s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    3m21s (x2 over 3m21s)  kubelet          Node multinode-095200-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     3m21s (x2 over 3m21s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  3m21s                  kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           3m18s                  node-controller  Node multinode-095200-m03 event: Registered Node multinode-095200-m03 in Controller
	  Normal  NodeReady                2m48s                  kubelet          Node multinode-095200-m03 status is now: NodeReady
	
	
	==> dmesg <==
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	[Apr 8 19:57] kauditd_printk_skb: 14 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T19:52:13.652359Z","caller":"etcdmain/main.go:50","msg":"successfully notified init daemon"}
	{"level":"info","ts":"2025-04-08T19:52:13.653175Z","caller":"embed/serve.go:250","msg":"serving client traffic securely","traffic":"grpc+http","address":"127.0.0.1:2379"}
	{"level":"info","ts":"2025-04-08T19:52:13.653497Z","caller":"membership/cluster.go:584","msg":"set initial cluster version","cluster-id":"720e62ee34e6c5c","local-member-id":"fd713bd18919f474","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653612Z","caller":"api/capability.go:75","msg":"enabled capabilities for version","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:52:13.653984Z","caller":"etcdserver/server.go:2675","msg":"cluster version is updated","cluster-version":"3.5"}
	{"level":"info","ts":"2025-04-08T19:53:04.028898Z","caller":"traceutil/trace.go:171","msg":"trace[1500643244] transaction","detail":"{read_only:false; response_revision:427; number_of_response:1; }","duration":"208.713694ms","start":"2025-04-08T19:53:03.820165Z","end":"2025-04-08T19:53:04.028879Z","steps":["trace[1500643244] 'process raft request'  (duration: 208.58869ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:02:13.994704Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":650}
	{"level":"info","ts":"2025-04-08T20:02:14.012096Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":650,"took":"16.853983ms","hash":4142537579,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":2072576,"current-db-size-in-use":"2.1 MB"}
	{"level":"info","ts":"2025-04-08T20:02:14.012321Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4142537579,"revision":650,"compact-revision":-1}
	{"level":"info","ts":"2025-04-08T20:07:14.017211Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":892}
	{"level":"info","ts":"2025-04-08T20:07:14.030836Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":892,"took":"13.37027ms","hash":3377129079,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1581056,"current-db-size-in-use":"1.6 MB"}
	{"level":"info","ts":"2025-04-08T20:07:14.030906Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3377129079,"revision":892,"compact-revision":650}
	{"level":"info","ts":"2025-04-08T20:10:48.783716Z","caller":"traceutil/trace.go:171","msg":"trace[1418995672] transaction","detail":"{read_only:false; response_revision:1304; number_of_response:1; }","duration":"172.026431ms","start":"2025-04-08T20:10:48.611671Z","end":"2025-04-08T20:10:48.783698Z","steps":["trace[1418995672] 'process raft request'  (duration: 171.88573ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:10:51.233244Z","caller":"traceutil/trace.go:171","msg":"trace[1434363476] transaction","detail":"{read_only:false; response_revision:1307; number_of_response:1; }","duration":"154.120134ms","start":"2025-04-08T20:10:51.079104Z","end":"2025-04-08T20:10:51.233224Z","steps":["trace[1434363476] 'process raft request'  (duration: 153.912433ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:11:50.092603Z","caller":"traceutil/trace.go:171","msg":"trace[1794927297] transaction","detail":"{read_only:false; response_revision:1355; number_of_response:1; }","duration":"207.047428ms","start":"2025-04-08T20:11:49.885538Z","end":"2025-04-08T20:11:50.092585Z","steps":["trace[1794927297] 'process raft request'  (duration: 206.952227ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:12:14.034956Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1132}
	{"level":"info","ts":"2025-04-08T20:12:14.043791Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":1132,"took":"8.264645ms","hash":3480195930,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1519616,"current-db-size-in-use":"1.5 MB"}
	{"level":"info","ts":"2025-04-08T20:12:14.043914Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3480195930,"revision":1132,"compact-revision":892}
	{"level":"info","ts":"2025-04-08T20:13:19.538892Z","caller":"traceutil/trace.go:171","msg":"trace[539230204] transaction","detail":"{read_only:false; response_revision:1427; number_of_response:1; }","duration":"196.84768ms","start":"2025-04-08T20:13:19.342005Z","end":"2025-04-08T20:13:19.538853Z","steps":["trace[539230204] 'process raft request'  (duration: 140.495971ms)","trace[539230204] 'compare'  (duration: 56.215708ms)"],"step_count":2}
	{"level":"info","ts":"2025-04-08T20:13:37.385572Z","caller":"traceutil/trace.go:171","msg":"trace[614883105] transaction","detail":"{read_only:false; response_revision:1480; number_of_response:1; }","duration":"116.69824ms","start":"2025-04-08T20:13:37.268859Z","end":"2025-04-08T20:13:37.385557Z","steps":["trace[614883105] 'process raft request'  (duration: 116.60574ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:13:37.497911Z","caller":"traceutil/trace.go:171","msg":"trace[1757030073] transaction","detail":"{read_only:false; response_revision:1481; number_of_response:1; }","duration":"112.257716ms","start":"2025-04-08T20:13:37.385615Z","end":"2025-04-08T20:13:37.497873Z","steps":["trace[1757030073] 'process raft request'  (duration: 44.166642ms)","trace[1757030073] 'compare'  (duration: 67.877973ms)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"283.043654ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/jobs/\" range_end:\"/registry/jobs0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132713Z","caller":"traceutil/trace.go:171","msg":"trace[488800171] range","detail":"{range_begin:/registry/jobs/; range_end:/registry/jobs0; response_count:0; response_revision:1492; }","duration":"283.247756ms","start":"2025-04-08T20:13:42.849451Z","end":"2025-04-08T20:13:43.132699Z","steps":["trace[488800171] 'count revisions from in-memory index tree'  (duration: 282.917554ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"126.454095ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/persistentvolumeclaims/\" range_end:\"/registry/persistentvolumeclaims0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132897Z","caller":"traceutil/trace.go:171","msg":"trace[1026523730] range","detail":"{range_begin:/registry/persistentvolumeclaims/; range_end:/registry/persistentvolumeclaims0; response_count:0; response_revision:1492; }","duration":"126.801497ms","start":"2025-04-08T20:13:43.006085Z","end":"2025-04-08T20:13:43.132887Z","steps":["trace[1026523730] 'count revisions from in-memory index tree'  (duration: 126.320694ms)"],"step_count":1}
	
	
	==> kernel <==
	 20:16:47 up 26 min,  0 users,  load average: 0.05, 0.46, 0.55
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 20:15:47.231285       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:15:57.235439       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:15:57.235689       1 main.go:301] handling current node
	I0408 20:15:57.235729       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:15:57.235815       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:16:07.228251       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:16:07.228291       1 main.go:301] handling current node
	I0408 20:16:07.228311       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:16:07.228318       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:16:17.235421       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:16:17.235520       1 main.go:301] handling current node
	I0408 20:16:17.235739       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:16:17.235883       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:16:27.236097       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:16:27.236203       1 main.go:301] handling current node
	I0408 20:16:27.236243       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:16:27.236251       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:16:37.227779       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:16:37.227876       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:16:37.228851       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:16:37.229004       1 main.go:301] handling current node
	I0408 20:16:47.233711       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:16:47.233792       1 main.go:301] handling current node
	I0408 20:16:47.233811       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:16:47.233913       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0408 20:08:59.828087       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59231: use of closed network connection
	E0408 20:09:00.905990       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59236: use of closed network connection
	E0408 20:09:01.943484       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59241: use of closed network connection
	E0408 20:09:40.196961       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59261: use of closed network connection
	E0408 20:09:50.729406       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59263: use of closed network connection
	E0408 20:13:27.732740       1 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0" auditID="25aea89a-4448-4253-aeac-bde38ac93ff6"
	E0408 20:13:27.732687       1 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="9.4µs" method="GET" path="/api/v1/services" result=null
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 19:57:27.107575       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:01:11.176721       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:06:17.042230       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:11:22.697513       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:13:26.955211       1 actual_state_of_world.go:541] "Failed to update statusUpdateNeeded field in actual state of world" logger="persistentvolume-attach-detach-controller" err="Failed to set statusUpdateNeeded to needed true, because nodeName=\"multinode-095200-m03\" does not exist"
	I0408 20:13:26.989485       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="41.6µs"
	I0408 20:13:27.001442       1 range_allocator.go:428] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-095200-m03" podCIDRs=["10.244.1.0/24"]
	I0408 20:13:27.001547       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.001576       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.359144       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.950840       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:29.495284       1 node_lifecycle_controller.go:886] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-095200-m03"
	I0408 20:13:29.512314       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:37.387903       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:57.842537       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.730120       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-095200-m03"
	I0408 20:13:59.731192       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.755844       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.769950       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="121.201µs"
	I0408 20:13:59.789908       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="69.4µs"
	I0408 20:14:02.549158       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="14.149878ms"
	I0408 20:14:02.549975       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="90.9µs"
	I0408 20:14:04.522546       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:14:28.386936       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:16:28.461548       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 20:12:20 multinode-095200 kubelet[2271]: E0408 20:12:20.021916    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:12:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:13:20 multinode-095200 kubelet[2271]: E0408 20:13:20.021956    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:13:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:14:20 multinode-095200 kubelet[2271]: E0408 20:14:20.020995    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:15:20 multinode-095200 kubelet[2271]: E0408 20:15:20.021786    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:16:20 multinode-095200 kubelet[2271]: E0408 20:16:20.020745    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
E0408 20:16:52.274743    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (12.6598853s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiNode/serial/CopyFile FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/CopyFile (72.94s)

                                                
                                    
x
+
TestMultiNode/serial/StopNode (123.88s)

                                                
                                                
=== RUN   TestMultiNode/serial/StopNode
multinode_test.go:248: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 node stop m03
multinode_test.go:248: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 node stop m03: (34.8028515s)
multinode_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 status
multinode_test.go:254: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p multinode-095200 status: exit status 7 (26.8289659s)

                                                
                                                
-- stdout --
	multinode-095200
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-095200-m02
	type: Worker
	host: Running
	kubelet: Stopped
	
	multinode-095200-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
multinode_test.go:261: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr
multinode_test.go:261: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr: exit status 7 (26.8285257s)

                                                
                                                
-- stdout --
	multinode-095200
	type: Control Plane
	host: Running
	kubelet: Running
	apiserver: Running
	kubeconfig: Configured
	
	multinode-095200-m02
	type: Worker
	host: Running
	kubelet: Stopped
	
	multinode-095200-m03
	type: Worker
	host: Stopped
	kubelet: Stopped
	

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 20:18:04.626462    9952 out.go:345] Setting OutFile to fd 1368 ...
	I0408 20:18:04.706524    9952 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:18:04.706524    9952 out.go:358] Setting ErrFile to fd 1524...
	I0408 20:18:04.706524    9952 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 20:18:04.721278    9952 out.go:352] Setting JSON to false
	I0408 20:18:04.721278    9952 mustload.go:65] Loading cluster: multinode-095200
	I0408 20:18:04.721278    9952 notify.go:220] Checking for updates...
	I0408 20:18:04.721978    9952 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 20:18:04.722523    9952 status.go:174] checking status of multinode-095200 ...
	I0408 20:18:04.723382    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:18:06.992124    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:06.992124    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:06.992197    9952 status.go:371] multinode-095200 host status = "Running" (err=<nil>)
	I0408 20:18:06.992197    9952 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:18:06.992967    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:18:09.264129    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:09.264129    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:09.264230    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:18:11.909060    9952 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:18:11.909060    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:11.909060    9952 host.go:66] Checking if "multinode-095200" exists ...
	I0408 20:18:11.921597    9952 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:18:11.921597    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 20:18:14.149331    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:14.150165    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:14.150165    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 20:18:16.767295    9952 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 20:18:16.767295    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:16.767700    9952 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 20:18:16.870240    9952 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9486027s)
	I0408 20:18:16.884151    9952 ssh_runner.go:195] Run: systemctl --version
	I0408 20:18:16.904668    9952 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:18:16.931297    9952 kubeconfig.go:125] found "multinode-095200" server: "https://172.22.37.202:8443"
	I0408 20:18:16.931415    9952 api_server.go:166] Checking apiserver status ...
	I0408 20:18:16.944156    9952 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 20:18:16.979916    9952 ssh_runner.go:195] Run: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup
	W0408 20:18:16.997836    9952 api_server.go:177] unable to find freezer cgroup: sudo egrep ^[0-9]+:freezer: /proc/2089/cgroup: Process exited with status 1
	stdout:
	
	stderr:
	I0408 20:18:17.009416    9952 ssh_runner.go:195] Run: ls
	I0408 20:18:17.017589    9952 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 20:18:17.024705    9952 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 20:18:17.024705    9952 status.go:463] multinode-095200 apiserver status = Running (err=<nil>)
	I0408 20:18:17.024705    9952 status.go:176] multinode-095200 status: &{Name:multinode-095200 Host:Running Kubelet:Running APIServer:Running Kubeconfig:Configured Worker:false TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:18:17.024705    9952 status.go:174] checking status of multinode-095200-m02 ...
	I0408 20:18:17.025195    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:18:19.276688    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:19.277710    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:19.277710    9952 status.go:371] multinode-095200-m02 host status = "Running" (err=<nil>)
	I0408 20:18:19.277811    9952 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:18:19.278020    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:18:21.493292    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:21.493292    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:21.493292    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:18:24.084253    9952 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:18:24.085333    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:24.085333    9952 host.go:66] Checking if "multinode-095200-m02" exists ...
	I0408 20:18:24.097531    9952 ssh_runner.go:195] Run: sh -c "df -h /var | awk 'NR==2{print $5}'"
	I0408 20:18:24.097531    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 20:18:26.314731    9952 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 20:18:26.314731    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:26.315301    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 20:18:28.966096    9952 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 20:18:28.966096    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:28.966724    9952 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 20:18:29.073638    9952 ssh_runner.go:235] Completed: sh -c "df -h /var | awk 'NR==2{print $5}'": (4.9760671s)
	I0408 20:18:29.086360    9952 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 20:18:29.113004    9952 status.go:176] multinode-095200-m02 status: &{Name:multinode-095200-m02 Host:Running Kubelet:Stopped APIServer:Irrelevant Kubeconfig:Irrelevant Worker:true TimeToStop: DockerEnv: PodManEnv:}
	I0408 20:18:29.113004    9952 status.go:174] checking status of multinode-095200-m03 ...
	I0408 20:18:29.113959    9952 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m03 ).state
	I0408 20:18:31.293573    9952 main.go:141] libmachine: [stdout =====>] : Off
	
	I0408 20:18:31.293648    9952 main.go:141] libmachine: [stderr =====>] : 
	I0408 20:18:31.293648    9952 status.go:371] multinode-095200-m03 host status = "Stopped" (err=<nil>)
	I0408 20:18:31.293648    9952 status.go:384] host is not running, skipping remaining checks
	I0408 20:18:31.293648    9952 status.go:176] multinode-095200-m03 status: &{Name:multinode-095200-m03 Host:Stopped Kubelet:Stopped APIServer:Stopped Kubeconfig:Stopped Worker:true TimeToStop: DockerEnv: PodManEnv:}

                                                
                                                
** /stderr **
multinode_test.go:267: incorrect number of running kubelets: args "out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr": multinode-095200
type: Control Plane
host: Running
kubelet: Running
apiserver: Running
kubeconfig: Configured

                                                
                                                
multinode-095200-m02
type: Worker
host: Running
kubelet: Stopped

                                                
                                                
multinode-095200-m03
type: Worker
host: Stopped
kubelet: Stopped

                                                
                                                
multinode_test.go:275: incorrect number of stopped kubelets: args "out/minikube-windows-amd64.exe -p multinode-095200 status --alsologtostderr": multinode-095200
type: Control Plane
host: Running
kubelet: Running
apiserver: Running
kubeconfig: Configured

                                                
                                                
multinode-095200-m02
type: Worker
host: Running
kubelet: Stopped

                                                
                                                
multinode-095200-m03
type: Worker
host: Stopped
kubelet: Stopped

                                                
                                                
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200
helpers_test.go:239: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.Host}} -p multinode-095200 -n multinode-095200: (12.3410821s)
helpers_test.go:244: <<< TestMultiNode/serial/StopNode FAILED: start of post-mortem logs <<<
helpers_test.go:245: ======>  post-mortem[TestMultiNode/serial/StopNode]: minikube logs <======
helpers_test.go:247: (dbg) Run:  out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25
helpers_test.go:247: (dbg) Done: out/minikube-windows-amd64.exe -p multinode-095200 logs -n 25: (8.7507929s)
helpers_test.go:252: TestMultiNode/serial/StopNode logs: 
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| Command |                 Args                 |     Profile      |       User        | Version |     Start Time      |      End Time       |
	|---------|--------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	| kubectl | -p multinode-095200 -- rollout       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 19:57 UTC |                     |
	|         | status deployment/busybox            |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:07 UTC | 08 Apr 25 20:07 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].status.podIP}'  |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:08 UTC | 08 Apr 25 20:08 UTC |
	|         | busybox-58667487b6-jn4np --          |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --          |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.io               |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np --          |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default          |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg --          |                  |                   |         |                     |                     |
	|         | nslookup kubernetes.default          |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np -- nslookup |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg -- nslookup |                  |                   |         |                     |                     |
	|         | kubernetes.default.svc.cluster.local |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- get pods -o   | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | jsonpath='{.items[*].metadata.name}' |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC | 08 Apr 25 20:09 UTC |
	|         | busybox-58667487b6-jn4np             |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                    |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk         |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-jn4np -- sh       |                  |                   |         |                     |                     |
	|         | -c ping -c 1 172.22.32.1             |                  |                   |         |                     |                     |
	| kubectl | -p multinode-095200 -- exec          | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:09 UTC |                     |
	|         | busybox-58667487b6-js7bg             |                  |                   |         |                     |                     |
	|         | -- sh -c nslookup                    |                  |                   |         |                     |                     |
	|         | host.minikube.internal | awk         |                  |                   |         |                     |                     |
	|         | 'NR==5' | cut -d' ' -f3              |                  |                   |         |                     |                     |
	| node    | add -p multinode-095200 -v 3         | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:10 UTC | 08 Apr 25 20:14 UTC |
	|         | --alsologtostderr                    |                  |                   |         |                     |                     |
	| node    | multinode-095200 node stop m03       | multinode-095200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 20:17 UTC | 08 Apr 25 20:17 UTC |
	|---------|--------------------------------------|------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 19:49:00
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 19:49:00.774746    9872 out.go:345] Setting OutFile to fd 1720 ...
	I0408 19:49:00.854161    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.854161    9872 out.go:358] Setting ErrFile to fd 1728...
	I0408 19:49:00.854232    9872 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 19:49:00.873235    9872 out.go:352] Setting JSON to false
	I0408 19:49:00.876413    9872 start.go:129] hostinfo: {"hostname":"minikube3","uptime":102526,"bootTime":1744039214,"procs":179,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 19:49:00.876413    9872 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 19:49:00.883590    9872 out.go:177] * [multinode-095200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 19:49:00.887524    9872 notify.go:220] Checking for updates...
	I0408 19:49:00.889385    9872 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:49:00.892643    9872 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 19:49:00.894907    9872 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 19:49:00.899137    9872 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 19:49:00.906230    9872 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 19:49:00.909602    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:49:00.910459    9872 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 19:49:06.852032    9872 out.go:177] * Using the hyperv driver based on user configuration
	I0408 19:49:06.855557    9872 start.go:297] selected driver: hyperv
	I0408 19:49:06.855557    9872 start.go:901] validating driver "hyperv" against <nil>
	I0408 19:49:06.855557    9872 start.go:912] status for hyperv: {Installed:true Healthy:true Running:false NeedsImprovement:false Error:<nil> Reason: Fix: Doc: Version:}
	I0408 19:49:06.910781    9872 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 19:49:06.912035    9872 start_flags.go:947] Waiting for all components: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:49:06.912035    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:49:06.912035    9872 cni.go:136] multinode detected (0 nodes found), recommending kindnet
	I0408 19:49:06.912035    9872 start_flags.go:319] Found "CNI" CNI - setting NetworkPlugin=cni
	I0408 19:49:06.912841    9872 start.go:340] cluster config:
	{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerR
untime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: Stat
icIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:49:06.912888    9872 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 19:49:06.917729    9872 out.go:177] * Starting "multinode-095200" primary control-plane node in "multinode-095200" cluster
	I0408 19:49:06.920385    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:49:06.920385    9872 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 19:49:06.920385    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:49:06.920897    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:49:06.920897    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:49:06.921324    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:49:06.921557    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json: {Name:mk78e2d37df7cf4fc8fb4dfd693b2de2a7661f7e Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:360] acquireMachinesLock for multinode-095200: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:49:06.921795    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200"
	I0408 19:49:06.923006    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountTy
pe:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:49:06.923006    9872 start.go:125] createHost starting for "" (driver="hyperv")
	I0408 19:49:06.927683    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:49:06.928030    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:49:06.928030    9872 client.go:168] LocalClient.Create starting
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:49:06.928349    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929085    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929296    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:49:06.929533    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:49:09.169280    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:09.169655    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:49:11.027494    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:11.028417    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:12.608624    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:12.608851    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:16.587237    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:16.588200    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:16.590521    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:49:17.211076    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: Creating VM...
	I0408 19:49:17.921718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:49:21.119182    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:21.119261    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:49:21.119261    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:23.067764    9872 main.go:141] libmachine: Creating VHD
	I0408 19:49:23.067764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2D083087-07C0-405C-9557-7CFC25D895C0
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:49:27.018673    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:49:27.019108    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:49:27.036035    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:30.271373    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd' -SizeBytes 20000MB
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:32.881567    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:32.882489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:49:36.850902    9872 main.go:141] libmachine: [stdout =====>] : 
	Name             State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----             ----- ----------- ----------------- ------   ------             -------
	multinode-095200 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:49:36.851303    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:36.851363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200 -DynamicMemoryEnabled $false
	I0408 19:49:39.236351    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:39.236990    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200 -Count 2
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:41.537575    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\boot2docker.iso'
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:44.284258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:44.285231    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\disk.vhd'
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:47.121067    9872 main.go:141] libmachine: Starting VM...
	I0408 19:49:47.121067    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:50.359525    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:49:50.359685    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:52.816297    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:52.816448    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:49:55.521909    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:56.523279    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:49:58.910903    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:49:58.911436    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:01.613461    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:01.613533    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:02.614287    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:04.934071    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:07.596707    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:08.597651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:10.953863    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:50:13.688077    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:14.689112    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:17.088930    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:17.089798    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:19.804628    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:19.805668    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:22.062744    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:22.063522    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:50:22.063671    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:24.434716    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:24.435510    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:27.196377    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:27.201930    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:27.223426    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:27.223426    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:50:27.368213    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:50:27.368770    9872 buildroot.go:166] provisioning hostname "multinode-095200"
	I0408 19:50:27.369059    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:29.717210    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:29.717387    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:32.434750    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:32.434803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:32.441731    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:32.442410    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:32.442410    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200 && echo "multinode-095200" | sudo tee /etc/hostname
	I0408 19:50:32.627901    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200
	
	I0408 19:50:32.628490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:34.952621    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:34.952819    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:37.705655    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:37.710606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:37.710606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:37.710606    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:50:37.876844    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:50:37.876844    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:50:37.876844    9872 buildroot.go:174] setting up certificates
	I0408 19:50:37.876844    9872 provision.go:84] configureAuth start
	I0408 19:50:37.876844    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:40.157036    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:40.157942    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:42.848556    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:45.080048    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:45.080202    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:47.808313    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:47.809430    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:47.809430    9872 provision.go:143] copyHostCerts
	I0408 19:50:47.809696    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:50:47.809898    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:50:47.809898    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:50:47.810449    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:50:47.811687    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:50:47.811927    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:50:47.811927    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:50:47.812315    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:50:47.813415    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:50:47.813682    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:50:47.813764    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:50:47.814035    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:50:47.815161    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200 san=[127.0.0.1 172.22.37.202 localhost minikube multinode-095200]
	I0408 19:50:48.678934    9872 provision.go:177] copyRemoteCerts
	I0408 19:50:48.693472    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:50:48.694097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:50.963652    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:50.964097    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:53.691831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:53.692818    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:53.693102    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:50:53.814949    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.1214371s)
	I0408 19:50:53.815107    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:50:53.815248    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:50:53.867121    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:50:53.868698    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1216 bytes)
	I0408 19:50:53.917791    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:50:53.918057    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:50:53.966390    9872 provision.go:87] duration metric: took 16.0893565s to configureAuth
	I0408 19:50:53.966478    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:50:53.966478    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:50:53.967103    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:50:56.175723    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:56.175838    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:50:58.855480    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:50:58.856125    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:50:58.864598    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:50:58.865388    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:50:58.865388    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:50:59.003848    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:50:59.003848    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:50:59.003848    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:50:59.003848    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:01.266687    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:01.266794    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:01.266885    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:04.007425    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:04.013853    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:04.014492    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:04.014492    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:51:04.188907    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:51:04.189195    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:06.430473    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:06.431168    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:06.431286    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:09.148704    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:09.148952    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:09.155411    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:09.155953    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:09.156141    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:51:11.533188    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:51:11.533188    9872 machine.go:96] duration metric: took 49.46928s to provisionDockerMachine
	I0408 19:51:11.533188    9872 client.go:171] duration metric: took 2m4.6041787s to LocalClient.Create
	I0408 19:51:11.533389    9872 start.go:167] duration metric: took 2m4.6043801s to libmachine.API.Create "multinode-095200"
	I0408 19:51:11.533389    9872 start.go:293] postStartSetup for "multinode-095200" (driver="hyperv")
	I0408 19:51:11.533389    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:51:11.547277    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:51:11.547277    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:13.827250    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:13.828363    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:16.687232    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:16.688143    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:16.688318    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:16.804012    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.2566941s)
	I0408 19:51:16.817222    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:51:16.824702    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:51:16.824702    9872 command_runner.go:130] > ID=buildroot
	I0408 19:51:16.824702    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:51:16.824702    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:51:16.824844    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:51:16.824844    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:51:16.825476    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:51:16.826527    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:51:16.826527    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:51:16.838376    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:51:16.855706    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:51:16.904095    9872 start.go:296] duration metric: took 5.3706643s for postStartSetup
	I0408 19:51:16.906686    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:19.242872    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:19.243281    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:19.243361    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:21.972488    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:21.973777    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:51:21.976538    9872 start.go:128] duration metric: took 2m15.0524723s to createHost
	I0408 19:51:21.976538    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:24.239099    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:24.240094    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:24.240134    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:27.004927    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:27.011512    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:27.012226    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:27.012226    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:51:27.154332    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744141887.160023125
	
	I0408 19:51:27.154332    9872 fix.go:216] guest clock: 1744141887.160023125
	I0408 19:51:27.154332    9872 fix.go:229] Guest: 2025-04-08 19:51:27.160023125 +0000 UTC Remote: 2025-04-08 19:51:21.9765387 +0000 UTC m=+141.301420501 (delta=5.183484425s)
	I0408 19:51:27.154332    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:29.454605    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:29.454840    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:32.202707    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:32.203245    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:32.211000    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:51:32.211000    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.37.202 22 <nil> <nil>}
	I0408 19:51:32.211000    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744141887
	I0408 19:51:32.368415    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:51:27 UTC 2025
	
	I0408 19:51:32.369843    9872 fix.go:236] clock set: Tue Apr  8 19:51:27 UTC 2025
	 (err=<nil>)
	I0408 19:51:32.369843    9872 start.go:83] releasing machines lock for "multinode-095200", held for 2m25.44588s
	I0408 19:51:32.370098    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:34.684521    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:34.685447    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:37.432681    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:37.432714    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:37.437108    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:51:37.437108    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:37.446764    9872 ssh_runner.go:195] Run: cat /version.json
	I0408 19:51:37.446764    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.735324    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:39.775923    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:51:39.776817    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:39.776928    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:51:42.449096    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.450145    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.450214    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.562591    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:51:42.563278    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.1261297s)
	W0408 19:51:42.563278    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:51:42.564119    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:51:42.564207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:51:42.564323    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:51:42.673145    9872 command_runner.go:130] > {"iso_version": "v1.35.0", "kicbase_version": "v0.0.45-1736763277-20236", "minikube_version": "v1.35.0", "commit": "3fb24bd87c8c8761e2515e1a9ee13835a389ed68"}
	I0408 19:51:42.673145    9872 ssh_runner.go:235] Completed: cat /version.json: (5.2263401s)
	W0408 19:51:42.682509    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:51:42.682509    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:51:42.689482    9872 ssh_runner.go:195] Run: systemctl --version
	I0408 19:51:42.698550    9872 command_runner.go:130] > systemd 252 (252)
	I0408 19:51:42.698550    9872 command_runner.go:130] > -PAM -AUDIT -SELINUX -APPARMOR -IMA -SMACK +SECCOMP +GCRYPT -GNUTLS +OPENSSL +ACL +BLKID +CURL -ELFUTILS -FIDO2 -IDN2 -IDN +IPTC +KMOD -LIBCRYPTSETUP +LIBFDISK -PCRE2 -PWQUALITY -P11KIT -QRENCODE -TPM2 -BZIP2 +LZ4 +XZ +ZLIB -ZSTD -BPF_FRAMEWORK -XKBCOMMON -UTMP -SYSVINIT default-hierarchy=unified
	I0408 19:51:42.708774    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:51:42.719995    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	W0408 19:51:42.720585    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:51:42.732955    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:51:42.764191    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:51:42.764191    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:51:42.764191    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:42.764191    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:42.799904    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:51:42.811105    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	I0408 19:51:42.845457    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:51:42.865425    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:51:42.876200    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:51:42.905672    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:42.936405    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:51:42.970517    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:51:43.003607    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:51:43.038335    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:51:43.074713    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:51:43.106155    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:51:43.135105    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:51:43.154753    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.155294    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:51:43.165697    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:51:43.203414    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:51:43.234065    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:43.453337    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:51:43.485536    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:51:43.497145    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Unit]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:51:43.520154    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:51:43.520154    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:51:43.520154    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:51:43.520154    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:51:43.520154    9872 command_runner.go:130] > [Service]
	I0408 19:51:43.520154    9872 command_runner.go:130] > Type=notify
	I0408 19:51:43.520154    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:51:43.520154    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:51:43.520154    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:51:43.520154    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:51:43.520154    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:51:43.520154    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:51:43.521138    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:51:43.521138    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:51:43.521138    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:51:43.521138    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:51:43.521138    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:51:43.521138    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:51:43.521138    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:51:43.521138    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:51:43.521138    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:51:43.521138    9872 command_runner.go:130] > Delegate=yes
	I0408 19:51:43.521138    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:51:43.521138    9872 command_runner.go:130] > KillMode=process
	I0408 19:51:43.521138    9872 command_runner.go:130] > [Install]
	I0408 19:51:43.521138    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:51:43.531142    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.564627    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:51:43.604884    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:51:43.639845    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.678041    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:51:43.743053    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:51:43.769057    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:51:43.806631    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:51:43.818504    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:51:43.824406    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:51:43.836334    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:51:43.854403    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:51:43.897618    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:51:44.099015    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:51:44.293120    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:51:44.293120    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:51:44.339290    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:44.551671    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:51:47.186394    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (2.6347019s)
	I0408 19:51:47.197682    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.socket
	I0408 19:51:47.236790    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:47.283632    9872 ssh_runner.go:195] Run: sudo systemctl unmask cri-docker.socket
	I0408 19:51:47.498013    9872 ssh_runner.go:195] Run: sudo systemctl enable cri-docker.socket
	I0408 19:51:47.723011    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:47.937782    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.socket
	I0408 19:51:47.980394    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service cri-docker.service
	I0408 19:51:48.017482    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:51:48.227588    9872 ssh_runner.go:195] Run: sudo systemctl restart cri-docker.service
	I0408 19:51:48.338746    9872 start.go:542] Will wait 60s for socket path /var/run/cri-dockerd.sock
	I0408 19:51:48.351655    9872 ssh_runner.go:195] Run: stat /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   File: /var/run/cri-dockerd.sock
	I0408 19:51:48.360472    9872 command_runner.go:130] >   Size: 0         	Blocks: 0          IO Block: 4096   socket
	I0408 19:51:48.360538    9872 command_runner.go:130] > Device: 0,22	Inode: 880         Links: 1
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: (0660/srw-rw----)  Uid: (    0/    root)   Gid: ( 1000/  docker)
	I0408 19:51:48.360538    9872 command_runner.go:130] > Access: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360538    9872 command_runner.go:130] > Modify: 2025-04-08 19:51:48.261876239 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] > Change: 2025-04-08 19:51:48.265876245 +0000
	I0408 19:51:48.360594    9872 command_runner.go:130] >  Birth: -
	I0408 19:51:48.360594    9872 start.go:563] Will wait 60s for crictl version
	I0408 19:51:48.375709    9872 ssh_runner.go:195] Run: which crictl
	I0408 19:51:48.382458    9872 command_runner.go:130] > /usr/bin/crictl
	I0408 19:51:48.391827    9872 ssh_runner.go:195] Run: sudo /usr/bin/crictl version
	I0408 19:51:48.457285    9872 command_runner.go:130] > Version:  0.1.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeName:  docker
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeVersion:  27.4.0
	I0408 19:51:48.457321    9872 command_runner.go:130] > RuntimeApiVersion:  v1
	I0408 19:51:48.457321    9872 start.go:579] Version:  0.1.0
	RuntimeName:  docker
	RuntimeVersion:  27.4.0
	RuntimeApiVersion:  v1
	I0408 19:51:48.470572    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.510244    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.521165    9872 ssh_runner.go:195] Run: docker version --format {{.Server.Version}}
	I0408 19:51:48.558151    9872 command_runner.go:130] > 27.4.0
	I0408 19:51:48.564636    9872 out.go:235] * Preparing Kubernetes v1.32.2 on Docker 27.4.0 ...
	I0408 19:51:48.564636    9872 ip.go:176] getIPForInterface: searching for "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Ethernet 2" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:190] "Loopback Pseudo-Interface 1" does not match prefix "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:185] found prefix matching interface for "vEthernet (Default Switch)": "vEthernet (Default Switch)"
	I0408 19:51:48.569604    9872 ip.go:211] Found interface: {Index:14 MTU:1500 Name:vEthernet (Default Switch) HardwareAddr:00:15:5d:e6:f3:f4 Flags:up|broadcast|multicast|running}
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: fe80::d768:9068:dab9:f4a/64
	I0408 19:51:48.572723    9872 ip.go:214] interface addr: 172.22.32.1/20
	I0408 19:51:48.584587    9872 ssh_runner.go:195] Run: grep 172.22.32.1	host.minikube.internal$ /etc/hosts
	I0408 19:51:48.591465    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\thost.minikube.internal$' "/etc/hosts"; echo "172.22.32.1	host.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:51:48.616995    9872 kubeadm.go:883] updating cluster {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0
95200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p
MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} ...
	I0408 19:51:48.617231    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:51:48.627145    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:51:48.654811    9872 docker.go:689] Got preloaded images: 
	I0408 19:51:48.654912    9872 docker.go:695] registry.k8s.io/kube-apiserver:v1.32.2 wasn't preloaded
	I0408 19:51:48.667254    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:51:48.685248    9872 command_runner.go:139] > {"Repositories":{}}
	I0408 19:51:48.696660    9872 ssh_runner.go:195] Run: which lz4
	I0408 19:51:48.704673    9872 command_runner.go:130] > /usr/bin/lz4
	I0408 19:51:48.705266    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 -> /preloaded.tar.lz4
	I0408 19:51:48.716220    9872 ssh_runner.go:195] Run: stat -c "%s %y" /preloaded.tar.lz4
	I0408 19:51:48.722889    9872 command_runner.go:130] ! stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723059    9872 ssh_runner.go:352] existence check for /preloaded.tar.lz4: stat -c "%s %y" /preloaded.tar.lz4: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/preloaded.tar.lz4': No such file or directory
	I0408 19:51:48.723175    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 --> /preloaded.tar.lz4 (349803115 bytes)
	I0408 19:51:50.984386    9872 docker.go:653] duration metric: took 2.279102s to copy over tarball
	I0408 19:51:50.997700    9872 ssh_runner.go:195] Run: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4
	I0408 19:52:00.039743    9872 ssh_runner.go:235] Completed: sudo tar --xattrs --xattrs-include security.capability -I lz4 -C /var -xf /preloaded.tar.lz4: (9.0419723s)
	I0408 19:52:00.039743    9872 ssh_runner.go:146] rm: /preloaded.tar.lz4
	I0408 19:52:00.115462    9872 ssh_runner.go:195] Run: sudo cat /var/lib/docker/image/overlay2/repositories.json
	I0408 19:52:00.135072    9872 command_runner.go:139] > {"Repositories":{"gcr.io/k8s-minikube/storage-provisioner":{"gcr.io/k8s-minikube/storage-provisioner:v5":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562","gcr.io/k8s-minikube/storage-provisioner@sha256:18eb69d1418e854ad5a19e399310e52808a8321e4c441c1dddad8977a0d7a944":"sha256:6e38f40d628db3002f5617342c8872c935de530d867d0f709a2fbda1a302a562"},"registry.k8s.io/coredns/coredns":{"registry.k8s.io/coredns/coredns:v1.11.3":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6","registry.k8s.io/coredns/coredns@sha256:9caabbf6238b189a65d0d6e6ac138de60d6a1c419e5a341fbbb7c78382559c6e":"sha256:c69fa2e9cbf5f42dc48af631e956d3f95724c13f91596bc567591790e5e36db6"},"registry.k8s.io/etcd":{"registry.k8s.io/etcd:3.5.16-0":"sha256:a9e7e6b294baf1695fccb862d956c5d3ad8510e1e4ca1535f35dc09f247abbfc","registry.k8s.io/etcd@sha256:c6a9d11cc5c04b114ccdef39a9265eeef818e3d02f5359be035ae784097fdec5":"sha256:a9e7e6b294baf1695fccb862d95
6c5d3ad8510e1e4ca1535f35dc09f247abbfc"},"registry.k8s.io/kube-apiserver":{"registry.k8s.io/kube-apiserver:v1.32.2":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef","registry.k8s.io/kube-apiserver@sha256:c47449f3e751588ea0cb74e325e0f83db335a415f4f4c7fb147375dd6c84757f":"sha256:85b7a174738baecbc53029b7913cd430a2060e0cbdb5f56c7957d32ff7f241ef"},"registry.k8s.io/kube-controller-manager":{"registry.k8s.io/kube-controller-manager:v1.32.2":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389","registry.k8s.io/kube-controller-manager@sha256:399aa50f4d1361c59dc458e634506d02de32613d03a9a614a21058741162ef90":"sha256:b6a454c5a800d201daacead6ff195ec6049fe6dc086621b0670bca912efaf389"},"registry.k8s.io/kube-proxy":{"registry.k8s.io/kube-proxy:v1.32.2":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68ff49a87c2266ebc5","registry.k8s.io/kube-proxy@sha256:83c025f0faa6799fab6645102a98138e39a9a7db2be3bc792c79d72659b1805d":"sha256:f1332858868e1c6a905123b21e2e322ab45a5b99a3532e68f
f49a87c2266ebc5"},"registry.k8s.io/kube-scheduler":{"registry.k8s.io/kube-scheduler:v1.32.2":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d","registry.k8s.io/kube-scheduler@sha256:45710d74cfd5aa10a001d0cf81747b77c28617444ffee0503d12f1dcd7450f76":"sha256:d8e673e7c9983f1f53569a9d2ba786c8abb42e3f744f77dc97a595f3caf9435d"},"registry.k8s.io/pause":{"registry.k8s.io/pause:3.10":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136","registry.k8s.io/pause@sha256:ee6521f290b2168b6e0935a181d4cff9be1ac3f505666ef0e3c98fae8199917a":"sha256:873ed75102791e5b0b8a7fcd41606c92fcec98d56d05ead4ac5131650004c136"}}}
	I0408 19:52:00.135072    9872 ssh_runner.go:362] scp memory --> /var/lib/docker/image/overlay2/repositories.json (2631 bytes)
	I0408 19:52:00.183320    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:00.407305    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:52:03.551928    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (3.1445584s)
	I0408 19:52:03.562680    9872 ssh_runner.go:195] Run: docker images --format {{.Repository}}:{{.Tag}}
	I0408 19:52:03.591175    9872 command_runner.go:130] > registry.k8s.io/kube-apiserver:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-proxy:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-controller-manager:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/kube-scheduler:v1.32.2
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/etcd:3.5.16-0
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/coredns/coredns:v1.11.3
	I0408 19:52:03.592339    9872 command_runner.go:130] > registry.k8s.io/pause:3.10
	I0408 19:52:03.592440    9872 command_runner.go:130] > gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:03.592440    9872 docker.go:689] Got preloaded images: -- stdout --
	registry.k8s.io/kube-apiserver:v1.32.2
	registry.k8s.io/kube-proxy:v1.32.2
	registry.k8s.io/kube-controller-manager:v1.32.2
	registry.k8s.io/kube-scheduler:v1.32.2
	registry.k8s.io/etcd:3.5.16-0
	registry.k8s.io/coredns/coredns:v1.11.3
	registry.k8s.io/pause:3.10
	gcr.io/k8s-minikube/storage-provisioner:v5
	
	-- /stdout --
	I0408 19:52:03.592440    9872 cache_images.go:84] Images are preloaded, skipping loading
	I0408 19:52:03.592561    9872 kubeadm.go:934] updating node { 172.22.37.202 8443 v1.32.2 docker true true} ...
	I0408 19:52:03.592793    9872 kubeadm.go:946] kubelet [Unit]
	Wants=docker.socket
	
	[Service]
	ExecStart=
	ExecStart=/var/lib/minikube/binaries/v1.32.2/kubelet --bootstrap-kubeconfig=/etc/kubernetes/bootstrap-kubelet.conf --config=/var/lib/kubelet/config.yaml --hostname-override=multinode-095200 --kubeconfig=/etc/kubernetes/kubelet.conf --node-ip=172.22.37.202
	
	[Install]
	 config:
	{KubernetesVersion:v1.32.2 ClusterName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:}
	I0408 19:52:03.603344    9872 ssh_runner.go:195] Run: docker info --format {{.CgroupDriver}}
	I0408 19:52:03.672552    9872 command_runner.go:130] > cgroupfs
	I0408 19:52:03.674789    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:03.675823    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:03.675823    9872 kubeadm.go:84] Using pod CIDR: 10.244.0.0/16
	I0408 19:52:03.675823    9872 kubeadm.go:189] kubeadm options: {CertDir:/var/lib/minikube/certs ServiceCIDR:10.96.0.0/12 PodSubnet:10.244.0.0/16 AdvertiseAddress:172.22.37.202 APIServerPort:8443 KubernetesVersion:v1.32.2 EtcdDataDir:/var/lib/minikube/etcd EtcdExtraArgs:map[] ClusterName:multinode-095200 NodeName:multinode-095200 DNSDomain:cluster.local CRISocket:/var/run/cri-dockerd.sock ImageRepository: ComponentOptions:[{Component:apiServer ExtraArgs:map[enable-admission-plugins:NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota] Pairs:map[certSANs:["127.0.0.1", "localhost", "172.22.37.202"]]} {Component:controllerManager ExtraArgs:map[allocate-node-cidrs:true leader-elect:false] Pairs:map[]} {Component:scheduler ExtraArgs:map[leader-elect:false] Pairs:map[]}] FeatureArgs:map[] NodeIP:172.22.37.202 CgroupDriver:cgroupfs ClientCAFile:/var/lib/minikube/certs/ca.crt StaticPodPath:/
etc/kubernetes/manifests ControlPlaneAddress:control-plane.minikube.internal KubeProxyOptions:map[] ResolvConfSearchRegression:false KubeletConfigOpts:map[containerRuntimeEndpoint:unix:///var/run/cri-dockerd.sock hairpinMode:hairpin-veth runtimeRequestTimeout:15m] PrependCriSocketUnix:true}
	I0408 19:52:03.675823    9872 kubeadm.go:195] kubeadm config:
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: InitConfiguration
	localAPIEndpoint:
	  advertiseAddress: 172.22.37.202
	  bindPort: 8443
	bootstrapTokens:
	  - groups:
	      - system:bootstrappers:kubeadm:default-node-token
	    ttl: 24h0m0s
	    usages:
	      - signing
	      - authentication
	nodeRegistration:
	  criSocket: unix:///var/run/cri-dockerd.sock
	  name: "multinode-095200"
	  kubeletExtraArgs:
	    - name: "node-ip"
	      value: "172.22.37.202"
	  taints: []
	---
	apiVersion: kubeadm.k8s.io/v1beta4
	kind: ClusterConfiguration
	apiServer:
	  certSANs: ["127.0.0.1", "localhost", "172.22.37.202"]
	  extraArgs:
	    - name: "enable-admission-plugins"
	      value: "NamespaceLifecycle,LimitRanger,ServiceAccount,DefaultStorageClass,DefaultTolerationSeconds,NodeRestriction,MutatingAdmissionWebhook,ValidatingAdmissionWebhook,ResourceQuota"
	controllerManager:
	  extraArgs:
	    - name: "allocate-node-cidrs"
	      value: "true"
	    - name: "leader-elect"
	      value: "false"
	scheduler:
	  extraArgs:
	    - name: "leader-elect"
	      value: "false"
	certificatesDir: /var/lib/minikube/certs
	clusterName: mk
	controlPlaneEndpoint: control-plane.minikube.internal:8443
	etcd:
	  local:
	    dataDir: /var/lib/minikube/etcd
	    extraArgs:
	      - name: "proxy-refresh-interval"
	        value: "70000"
	kubernetesVersion: v1.32.2
	networking:
	  dnsDomain: cluster.local
	  podSubnet: "10.244.0.0/16"
	  serviceSubnet: 10.96.0.0/12
	---
	apiVersion: kubelet.config.k8s.io/v1beta1
	kind: KubeletConfiguration
	authentication:
	  x509:
	    clientCAFile: /var/lib/minikube/certs/ca.crt
	cgroupDriver: cgroupfs
	containerRuntimeEndpoint: unix:///var/run/cri-dockerd.sock
	hairpinMode: hairpin-veth
	runtimeRequestTimeout: 15m
	clusterDomain: "cluster.local"
	# disable disk resource management by default
	imageGCHighThresholdPercent: 100
	evictionHard:
	  nodefs.available: "0%"
	  nodefs.inodesFree: "0%"
	  imagefs.available: "0%"
	failSwapOn: false
	staticPodPath: /etc/kubernetes/manifests
	---
	apiVersion: kubeproxy.config.k8s.io/v1alpha1
	kind: KubeProxyConfiguration
	clusterCIDR: "10.244.0.0/16"
	metricsBindAddress: 0.0.0.0:10249
	conntrack:
	  maxPerCore: 0
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_established"
	  tcpEstablishedTimeout: 0s
	# Skip setting "net.netfilter.nf_conntrack_tcp_timeout_close"
	  tcpCloseWaitTimeout: 0s
	
	I0408 19:52:03.688306    9872 ssh_runner.go:195] Run: sudo ls /var/lib/minikube/binaries/v1.32.2
	I0408 19:52:03.706521    9872 command_runner.go:130] > kubeadm
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubectl
	I0408 19:52:03.707656    9872 command_runner.go:130] > kubelet
	I0408 19:52:03.707758    9872 binaries.go:44] Found k8s binaries, skipping transfer
	I0408 19:52:03.719767    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/kubelet.service.d /lib/systemd/system /var/tmp/minikube
	I0408 19:52:03.741106    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/kubelet.service.d/10-kubeadm.conf (316 bytes)
	I0408 19:52:03.774136    9872 ssh_runner.go:362] scp memory --> /lib/systemd/system/kubelet.service (352 bytes)
	I0408 19:52:03.807257    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/kubeadm.yaml.new (2297 bytes)
	I0408 19:52:03.851433    9872 ssh_runner.go:195] Run: grep 172.22.37.202	control-plane.minikube.internal$ /etc/hosts
	I0408 19:52:03.860664    9872 ssh_runner.go:195] Run: /bin/bash -c "{ grep -v $'\tcontrol-plane.minikube.internal$' "/etc/hosts"; echo "172.22.37.202	control-plane.minikube.internal"; } > /tmp/h.$$; sudo cp /tmp/h.$$ "/etc/hosts""
	I0408 19:52:03.891633    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:04.098085    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:04.130427    9872 certs.go:68] Setting up C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200 for IP: 172.22.37.202
	I0408 19:52:04.130456    9872 certs.go:194] generating shared ca certs ...
	I0408 19:52:04.130456    9872 certs.go:226] acquiring lock for ca certs: {Name:mk09ff4ada22228900e1815c250154c7d8d76854 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "minikubeCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key
	I0408 19:52:04.130617    9872 certs.go:235] skipping valid "proxyClientCA" ca cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key
	I0408 19:52:04.131694    9872 certs.go:256] generating profile certs ...
	I0408 19:52:04.132060    9872 certs.go:363] generating signed profile cert for "minikube-user": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key
	I0408 19:52:04.132634    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt with IP's: []
	I0408 19:52:04.274142    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt ...
	I0408 19:52:04.274142    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.crt: {Name:mk86d595806f5fd9593a6e193d79c2cb1c05dca6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.276091    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key ...
	I0408 19:52:04.276091    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\client.key: {Name:mkc3c5888c23418b317c452a81f5b7d2259f5729 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.277619    9872 certs.go:363] generating signed profile cert for "minikube": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763
	I0408 19:52:04.277619    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 with IP's: [10.96.0.1 127.0.0.1 10.0.0.1 172.22.37.202]
	I0408 19:52:04.563120    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 ...
	I0408 19:52:04.563120    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763: {Name:mk667d32a0f4f68d06acdd296a200a175e4bd1a6 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.564802    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 ...
	I0408 19:52:04.564802    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763: {Name:mkdcd185403fc77000c9ee1f39034a2451f651ae Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.566319    9872 certs.go:381] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt
	I0408 19:52:04.581800    9872 certs.go:385] copying C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key.fe3d7763 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key
	I0408 19:52:04.582237    9872 certs.go:363] generating signed profile cert for "aggregator": C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key
	I0408 19:52:04.583399    9872 crypto.go:68] Generating cert C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt with IP's: []
	I0408 19:52:04.856137    9872 crypto.go:156] Writing cert to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt ...
	I0408 19:52:04.857105    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt: {Name:mkaeef24c04764c7ea5b50d972f523dc53f8cf7b Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.858641    9872 crypto.go:164] Writing key to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key ...
	I0408 19:52:04.858641    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key: {Name:mk64e80f25151595996ca0ecd780402b8fe7dde0 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:04.859135    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /var/lib/minikube/certs/ca.crt
	I0408 19:52:04.860441    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key -> /var/lib/minikube/certs/ca.key
	I0408 19:52:04.860692    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt -> /var/lib/minikube/certs/proxy-client-ca.crt
	I0408 19:52:04.860769    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key -> /var/lib/minikube/certs/proxy-client-ca.key
	I0408 19:52:04.861132    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt -> /var/lib/minikube/certs/apiserver.crt
	I0408 19:52:04.861396    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key -> /var/lib/minikube/certs/apiserver.key
	I0408 19:52:04.861576    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt -> /var/lib/minikube/certs/proxy-client.crt
	I0408 19:52:04.873859    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key -> /var/lib/minikube/certs/proxy-client.key
	I0408 19:52:04.873859    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem (1338 bytes)
	W0408 19:52:04.874818    9872 certs.go:480] ignoring C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904_empty.pem, impossibly tiny 0 bytes
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem (1675 bytes)
	I0408 19:52:04.874818    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem (1078 bytes)
	I0408 19:52:04.876113    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem (1123 bytes)
	I0408 19:52:04.876528    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem (1675 bytes)
	I0408 19:52:04.877335    9872 certs.go:484] found cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem (1708 bytes)
	I0408 19:52:04.877649    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /usr/share/ca-certificates/79042.pem
	I0408 19:52:04.877876    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt -> /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:04.878076    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem -> /usr/share/ca-certificates/7904.pem
	I0408 19:52:04.879413    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /var/lib/minikube/certs/ca.crt (1111 bytes)
	I0408 19:52:04.932184    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.key --> /var/lib/minikube/certs/ca.key (1675 bytes)
	I0408 19:52:04.978269    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.crt --> /var/lib/minikube/certs/proxy-client-ca.crt (1119 bytes)
	I0408 19:52:05.025583    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\proxy-client-ca.key --> /var/lib/minikube/certs/proxy-client-ca.key (1679 bytes)
	I0408 19:52:05.072756    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.crt --> /var/lib/minikube/certs/apiserver.crt (1424 bytes)
	I0408 19:52:05.122925    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\apiserver.key --> /var/lib/minikube/certs/apiserver.key (1679 bytes)
	I0408 19:52:05.173116    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.crt --> /var/lib/minikube/certs/proxy-client.crt (1147 bytes)
	I0408 19:52:05.216075    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\proxy-client.key --> /var/lib/minikube/certs/proxy-client.key (1675 bytes)
	I0408 19:52:05.260656    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /usr/share/ca-certificates/79042.pem (1708 bytes)
	I0408 19:52:05.318985    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.crt --> /usr/share/ca-certificates/minikubeCA.pem (1111 bytes)
	I0408 19:52:05.372737    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\7904.pem --> /usr/share/ca-certificates/7904.pem (1338 bytes)
	I0408 19:52:05.425642    9872 ssh_runner.go:362] scp memory --> /var/lib/minikube/kubeconfig (738 bytes)
	I0408 19:52:05.470700    9872 ssh_runner.go:195] Run: openssl version
	I0408 19:52:05.480725    9872 command_runner.go:130] > OpenSSL 1.1.1w  11 Sep 2023
	I0408 19:52:05.492856    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/79042.pem && ln -fs /usr/share/ca-certificates/79042.pem /etc/ssl/certs/79042.pem"
	I0408 19:52:05.526240    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.532969    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.533017    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1708 Apr  8 18:09 /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.550363    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/79042.pem
	I0408 19:52:05.562216    9872 command_runner.go:130] > 3ec20f2e
	I0408 19:52:05.577480    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/3ec20f2e.0 || ln -fs /etc/ssl/certs/79042.pem /etc/ssl/certs/3ec20f2e.0"
	I0408 19:52:05.617966    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/minikubeCA.pem && ln -fs /usr/share/ca-certificates/minikubeCA.pem /etc/ssl/certs/minikubeCA.pem"
	I0408 19:52:05.651424    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661058    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.661315    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1111 Apr  8 17:52 /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.673840    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/minikubeCA.pem
	I0408 19:52:05.685141    9872 command_runner.go:130] > b5213941
	I0408 19:52:05.698613    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/b5213941.0 || ln -fs /etc/ssl/certs/minikubeCA.pem /etc/ssl/certs/b5213941.0"
	I0408 19:52:05.734035    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -s /usr/share/ca-certificates/7904.pem && ln -fs /usr/share/ca-certificates/7904.pem /etc/ssl/certs/7904.pem"
	I0408 19:52:05.772338    9872 ssh_runner.go:195] Run: ls -la /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 command_runner.go:130] > -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.779249    9872 certs.go:528] hashing: -rw-r--r-- 1 root root 1338 Apr  8 18:09 /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.801600    9872 ssh_runner.go:195] Run: openssl x509 -hash -noout -in /usr/share/ca-certificates/7904.pem
	I0408 19:52:05.812641    9872 command_runner.go:130] > 51391683
	I0408 19:52:05.824645    9872 ssh_runner.go:195] Run: sudo /bin/bash -c "test -L /etc/ssl/certs/51391683.0 || ln -fs /etc/ssl/certs/7904.pem /etc/ssl/certs/51391683.0"
	I0408 19:52:05.859288    9872 ssh_runner.go:195] Run: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt
	I0408 19:52:05.866958    9872 command_runner.go:130] ! stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867425    9872 certs.go:399] 'apiserver-kubelet-client' cert doesn't exist, likely first start: stat /var/lib/minikube/certs/apiserver-kubelet-client.crt: Process exited with status 1
	stdout:
	
	stderr:
	stat: cannot statx '/var/lib/minikube/certs/apiserver-kubelet-client.crt': No such file or directory
	I0408 19:52:05.867997    9872 kubeadm.go:392] StartCluster: {Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:multinode-0952
00 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p Mou
ntUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 19:52:05.881002    9872 ssh_runner.go:195] Run: docker ps --filter status=paused --filter=name=k8s_.*_(kube-system)_ --format={{.ID}}
	I0408 19:52:05.920919    9872 ssh_runner.go:195] Run: sudo ls /var/lib/kubelet/kubeadm-flags.env /var/lib/kubelet/config.yaml /var/lib/minikube/etcd
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/kubeadm-flags.env': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/kubelet/config.yaml': No such file or directory
	I0408 19:52:05.939942    9872 command_runner.go:130] ! ls: cannot access '/var/lib/minikube/etcd': No such file or directory
	I0408 19:52:05.951811    9872 ssh_runner.go:195] Run: sudo cp /var/tmp/minikube/kubeadm.yaml.new /var/tmp/minikube/kubeadm.yaml
	I0408 19:52:05.980121    9872 ssh_runner.go:195] Run: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	I0408 19:52:05.996462    9872 command_runner.go:130] ! ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997642    9872 kubeadm.go:155] config check failed, skipping stale config cleanup: sudo ls -la /etc/kubernetes/admin.conf /etc/kubernetes/kubelet.conf /etc/kubernetes/controller-manager.conf /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	ls: cannot access '/etc/kubernetes/admin.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/kubelet.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/controller-manager.conf': No such file or directory
	ls: cannot access '/etc/kubernetes/scheduler.conf': No such file or directory
	I0408 19:52:05.997700    9872 kubeadm.go:157] found existing configuration files:
	
	I0408 19:52:06.010705    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf
	I0408 19:52:06.029957    9872 command_runner.go:130] ! grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.030856    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/admin.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/admin.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/admin.conf: No such file or directory
	I0408 19:52:06.044533    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/admin.conf
	I0408 19:52:06.075305    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf
	I0408 19:52:06.094557    9872 command_runner.go:130] ! grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.094557    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/kubelet.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/kubelet.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/kubelet.conf: No such file or directory
	I0408 19:52:06.108863    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/kubelet.conf
	I0408 19:52:06.142541    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.159332    9872 command_runner.go:130] ! grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.160671    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/controller-manager.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/controller-manager.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/controller-manager.conf: No such file or directory
	I0408 19:52:06.172575    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/controller-manager.conf
	I0408 19:52:06.201914    9872 ssh_runner.go:195] Run: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf
	I0408 19:52:06.220520    9872 command_runner.go:130] ! grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.220520    9872 kubeadm.go:163] "https://control-plane.minikube.internal:8443" may not be in /etc/kubernetes/scheduler.conf - will remove: sudo grep https://control-plane.minikube.internal:8443 /etc/kubernetes/scheduler.conf: Process exited with status 2
	stdout:
	
	stderr:
	grep: /etc/kubernetes/scheduler.conf: No such file or directory
	I0408 19:52:06.233311    9872 ssh_runner.go:195] Run: sudo rm -f /etc/kubernetes/scheduler.conf
	I0408 19:52:06.250682    9872 ssh_runner.go:286] Start: /bin/bash -c "sudo env PATH="/var/lib/minikube/binaries/v1.32.2:$PATH" kubeadm init --config /var/tmp/minikube/kubeadm.yaml  --ignore-preflight-errors=DirAvailable--etc-kubernetes-manifests,DirAvailable--var-lib-minikube,DirAvailable--var-lib-minikube-etcd,FileAvailable--etc-kubernetes-manifests-kube-scheduler.yaml,FileAvailable--etc-kubernetes-manifests-kube-apiserver.yaml,FileAvailable--etc-kubernetes-manifests-kube-controller-manager.yaml,FileAvailable--etc-kubernetes-manifests-etcd.yaml,Port-10250,Swap,NumCPU,Mem"
	I0408 19:52:06.733500    9872 kubeadm.go:310] 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:06.733602    9872 command_runner.go:130] ! 	[WARNING Service-Kubelet]: kubelet service is not enabled, please run 'systemctl enable kubelet.service'
	I0408 19:52:20.452992    9872 kubeadm.go:310] [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453079    9872 command_runner.go:130] > [init] Using Kubernetes version: v1.32.2
	I0408 19:52:20.453204    9872 kubeadm.go:310] [preflight] Running pre-flight checks
	I0408 19:52:20.453270    9872 command_runner.go:130] > [preflight] Running pre-flight checks
	I0408 19:52:20.453618    9872 command_runner.go:130] > [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453665    9872 kubeadm.go:310] [preflight] Pulling images required for setting up a Kubernetes cluster
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] This might take a minute or two, depending on the speed of your internet connection
	I0408 19:52:20.453886    9872 command_runner.go:130] > [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.453886    9872 kubeadm.go:310] [preflight] You can also perform this action beforehand using 'kubeadm config images pull'
	I0408 19:52:20.454465    9872 command_runner.go:130] > [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.454465    9872 kubeadm.go:310] [certs] Using certificateDir folder "/var/lib/minikube/certs"
	I0408 19:52:20.457821    9872 out.go:235]   - Generating certificates and keys ...
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing ca certificate authority
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Using existing apiserver certificate and key on disk
	I0408 19:52:20.458068    9872 kubeadm.go:310] [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458068    9872 command_runner.go:130] > [certs] Generating "apiserver-kubelet-client" certificate and key
	I0408 19:52:20.458615    9872 kubeadm.go:310] [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.458812    9872 command_runner.go:130] > [certs] Generating "front-proxy-ca" certificate and key
	I0408 19:52:20.459182    9872 kubeadm.go:310] [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459182    9872 command_runner.go:130] > [certs] Generating "front-proxy-client" certificate and key
	I0408 19:52:20.459298    9872 kubeadm.go:310] [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459298    9872 command_runner.go:130] > [certs] Generating "etcd/ca" certificate and key
	I0408 19:52:20.459401    9872 kubeadm.go:310] [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459501    9872 command_runner.go:130] > [certs] Generating "etcd/server" certificate and key
	I0408 19:52:20.459844    9872 kubeadm.go:310] [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.459844    9872 command_runner.go:130] > [certs] etcd/server serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460139    9872 command_runner.go:130] > [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460139    9872 kubeadm.go:310] [certs] Generating "etcd/peer" certificate and key
	I0408 19:52:20.460469    9872 kubeadm.go:310] [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460469    9872 command_runner.go:130] > [certs] etcd/peer serving cert is signed for DNS names [localhost multinode-095200] and IPs [172.22.37.202 127.0.0.1 ::1]
	I0408 19:52:20.460576    9872 kubeadm.go:310] [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460576    9872 command_runner.go:130] > [certs] Generating "etcd/healthcheck-client" certificate and key
	I0408 19:52:20.460682    9872 kubeadm.go:310] [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "apiserver-etcd-client" certificate and key
	I0408 19:52:20.460739    9872 kubeadm.go:310] [certs] Generating "sa" key and public key
	I0408 19:52:20.460739    9872 command_runner.go:130] > [certs] Generating "sa" key and public key
	I0408 19:52:20.460828    9872 kubeadm.go:310] [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.460828    9872 command_runner.go:130] > [kubeconfig] Using kubeconfig folder "/etc/kubernetes"
	I0408 19:52:20.461009    9872 kubeadm.go:310] [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.461009    9872 command_runner.go:130] > [kubeconfig] Writing "admin.conf" kubeconfig file
	I0408 19:52:20.463323    9872 kubeadm.go:310] [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "super-admin.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "kubelet.conf" kubeconfig file
	I0408 19:52:20.463374    9872 command_runner.go:130] > [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463374    9872 kubeadm.go:310] [kubeconfig] Writing "controller-manager.conf" kubeconfig file
	I0408 19:52:20.463935    9872 command_runner.go:130] > [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.463935    9872 kubeadm.go:310] [kubeconfig] Writing "scheduler.conf" kubeconfig file
	I0408 19:52:20.464109    9872 command_runner.go:130] > [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [etcd] Creating static Pod manifest for local etcd in "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 command_runner.go:130] > [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.464140    9872 kubeadm.go:310] [control-plane] Using manifest folder "/etc/kubernetes/manifests"
	I0408 19:52:20.466393    9872 out.go:235]   - Booting up control plane ...
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-apiserver"
	I0408 19:52:20.466393    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466393    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-controller-manager"
	I0408 19:52:20.466927    9872 kubeadm.go:310] [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467126    9872 command_runner.go:130] > [control-plane] Creating static Pod manifest for "kube-scheduler"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet environment file with flags to file "/var/lib/kubelet/kubeadm-flags.env"
	I0408 19:52:20.467436    9872 command_runner.go:130] > [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467436    9872 kubeadm.go:310] [kubelet-start] Writing kubelet configuration to file "/var/lib/kubelet/config.yaml"
	I0408 19:52:20.467736    9872 command_runner.go:130] > [kubelet-start] Starting the kubelet
	I0408 19:52:20.467736    9872 kubeadm.go:310] [kubelet-start] Starting the kubelet
	I0408 19:52:20.468553    9872 kubeadm.go:310] [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468608    9872 command_runner.go:130] > [wait-control-plane] Waiting for the kubelet to boot up the control plane as static Pods from directory "/etc/kubernetes/manifests"
	I0408 19:52:20.468983    9872 command_runner.go:130] > [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469014    9872 kubeadm.go:310] [kubelet-check] Waiting for a healthy kubelet at http://127.0.0.1:10248/healthz. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [kubelet-check] The kubelet is healthy after 1.501883024s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] Waiting for a healthy API server. This can take up to 4m0s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 kubeadm.go:310] [api-check] The API server is healthy after 7.002699114s
	I0408 19:52:20.469090    9872 command_runner.go:130] > [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469090    9872 kubeadm.go:310] [upload-config] Storing the configuration used in ConfigMap "kubeadm-config" in the "kube-system" Namespace
	I0408 19:52:20.469895    9872 command_runner.go:130] > [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 kubeadm.go:310] [kubelet] Creating a ConfigMap "kubelet-config" in namespace kube-system with the configuration for the kubelets in the cluster
	I0408 19:52:20.469895    9872 command_runner.go:130] > [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.469895    9872 kubeadm.go:310] [upload-certs] Skipping phase. Please see --upload-certs
	I0408 19:52:20.470156    9872 command_runner.go:130] > [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 kubeadm.go:310] [mark-control-plane] Marking the node multinode-095200 as control-plane by adding the labels: [node-role.kubernetes.io/control-plane node.kubernetes.io/exclude-from-external-load-balancers]
	I0408 19:52:20.470156    9872 command_runner.go:130] > [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.470156    9872 kubeadm.go:310] [bootstrap-token] Using token: 1vt15y.1nipqnotix6zr7j6
	I0408 19:52:20.473065    9872 out.go:235]   - Configuring RBAC rules ...
	I0408 19:52:20.473170    9872 command_runner.go:130] > [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473170    9872 kubeadm.go:310] [bootstrap-token] Configuring bootstrap tokens, cluster-info ConfigMap, RBAC Roles
	I0408 19:52:20.473542    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473542    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to get nodes
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow Node Bootstrap tokens to post CSRs in order for nodes to get long term certificate credentials
	I0408 19:52:20.473664    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.473664    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow the csrapprover controller automatically approve CSRs from a Node Bootstrap Token
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Configured RBAC rules to allow certificate rotation for all node client certificates in the cluster
	I0408 19:52:20.474341    9872 kubeadm.go:310] [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474341    9872 command_runner.go:130] > [bootstrap-token] Creating the "cluster-info" ConfigMap in the "kube-public" namespace
	I0408 19:52:20.474919    9872 kubeadm.go:310] [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [kubelet-finalize] Updating "/etc/kubernetes/kubelet.conf" to point to a rotatable kubelet client certificate and key
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: CoreDNS
	I0408 19:52:20.474919    9872 kubeadm.go:310] [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 command_runner.go:130] > [addons] Applied essential addon: kube-proxy
	I0408 19:52:20.474919    9872 kubeadm.go:310] 
	I0408 19:52:20.475495    9872 command_runner.go:130] > Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] Your Kubernetes control-plane has initialized successfully!
	I0408 19:52:20.475495    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] > To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] To start using your cluster, you need to run the following as a regular user:
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.475722    9872 command_runner.go:130] >   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   mkdir -p $HOME/.kube
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo cp -i /etc/kubernetes/admin.conf $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310]   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 command_runner.go:130] >   sudo chown $(id -u):$(id -g) $HOME/.kube/config
	I0408 19:52:20.475722    9872 kubeadm.go:310] 
	I0408 19:52:20.476380    9872 command_runner.go:130] > Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] Alternatively, if you are the root user, you can run:
	I0408 19:52:20.476380    9872 kubeadm.go:310] 
	I0408 19:52:20.476531    9872 command_runner.go:130] >   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476580    9872 kubeadm.go:310]   export KUBECONFIG=/etc/kubernetes/admin.conf
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.476675    9872 command_runner.go:130] > You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 kubeadm.go:310] You should now deploy a pod network to the cluster.
	I0408 19:52:20.476675    9872 command_runner.go:130] > Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 kubeadm.go:310] Run "kubectl apply -f [podnetwork].yaml" with one of the options listed at:
	I0408 19:52:20.476675    9872 command_runner.go:130] >   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310]   https://kubernetes.io/docs/concepts/cluster-administration/addons/
	I0408 19:52:20.476675    9872 kubeadm.go:310] 
	I0408 19:52:20.477288    9872 command_runner.go:130] > You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 kubeadm.go:310] You can now join any number of control-plane nodes by copying certificate authorities
	I0408 19:52:20.477288    9872 command_runner.go:130] > and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] and service account keys on each node and then running the following as root:
	I0408 19:52:20.477288    9872 kubeadm.go:310] 
	I0408 19:52:20.477916    9872 command_runner.go:130] >   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.477916    9872 kubeadm.go:310]   kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.478367    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478399    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 \
	I0408 19:52:20.478542    9872 command_runner.go:130] > 	--control-plane 
	I0408 19:52:20.478572    9872 kubeadm.go:310] 	--control-plane 
	I0408 19:52:20.478666    9872 kubeadm.go:310] 
	I0408 19:52:20.478867    9872 command_runner.go:130] > Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] Then you can join any number of worker nodes by running the following on each as root:
	I0408 19:52:20.478951    9872 kubeadm.go:310] 
	I0408 19:52:20.479527    9872 command_runner.go:130] > kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479527    9872 kubeadm.go:310] kubeadm join control-plane.minikube.internal:8443 --token 1vt15y.1nipqnotix6zr7j6 \
	I0408 19:52:20.479838    9872 command_runner.go:130] > 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479838    9872 kubeadm.go:310] 	--discovery-token-ca-cert-hash sha256:a4ecaf19a4b2b33a44d9c2147bec365a2dc0144c7c28a6a61c16ff4d75b13762 
	I0408 19:52:20.479961    9872 cni.go:84] Creating CNI manager for ""
	I0408 19:52:20.479961    9872 cni.go:136] multinode detected (1 nodes found), recommending kindnet
	I0408 19:52:20.485913    9872 out.go:177] * Configuring CNI (Container Networking Interface) ...
	I0408 19:52:20.502132    9872 ssh_runner.go:195] Run: stat /opt/cni/bin/portmap
	I0408 19:52:20.510742    9872 command_runner.go:130] >   File: /opt/cni/bin/portmap
	I0408 19:52:20.510783    9872 command_runner.go:130] >   Size: 3103192   	Blocks: 6064       IO Block: 4096   regular file
	I0408 19:52:20.510783    9872 command_runner.go:130] > Device: 0,17	Inode: 3500        Links: 1
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: (0755/-rwxr-xr-x)  Uid: (    0/    root)   Gid: (    0/    root)
	I0408 19:52:20.510843    9872 command_runner.go:130] > Access: 2025-04-08 19:50:16.794093100 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Modify: 2025-01-14 09:03:58.000000000 +0000
	I0408 19:52:20.510843    9872 command_runner.go:130] > Change: 2025-04-08 19:50:07.586000000 +0000
	I0408 19:52:20.510901    9872 command_runner.go:130] >  Birth: -
	I0408 19:52:20.511021    9872 cni.go:182] applying CNI manifest using /var/lib/minikube/binaries/v1.32.2/kubectl ...
	I0408 19:52:20.511021    9872 ssh_runner.go:362] scp memory --> /var/tmp/minikube/cni.yaml (2601 bytes)
	I0408 19:52:20.560176    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl apply --kubeconfig=/var/lib/minikube/kubeconfig -f /var/tmp/minikube/cni.yaml
	I0408 19:52:21.272806    9872 command_runner.go:130] > clusterrole.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > serviceaccount/kindnet created
	I0408 19:52:21.273804    9872 command_runner.go:130] > daemonset.apps/kindnet created
	I0408 19:52:21.273804    9872 ssh_runner.go:195] Run: /bin/bash -c "cat /proc/$(pgrep kube-apiserver)/oom_adj"
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig label --overwrite nodes multinode-095200 minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700 minikube.k8s.io/version=v1.35.0 minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8 minikube.k8s.io/name=multinode-095200 minikube.k8s.io/primary=true
	I0408 19:52:21.286808    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl create clusterrolebinding minikube-rbac --clusterrole=cluster-admin --serviceaccount=kube-system:default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.301226    9872 command_runner.go:130] > -16
	I0408 19:52:21.301300    9872 ops.go:34] apiserver oom_adj: -16
	I0408 19:52:21.495807    9872 command_runner.go:130] > node/multinode-095200 labeled
	I0408 19:52:21.499081    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/minikube-rbac created
	I0408 19:52:21.510963    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:21.641405    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.012336    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.137557    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:22.512386    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:22.625725    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.011952    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.129972    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:23.513273    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:23.636509    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.012428    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.194570    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:24.513624    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:24.631069    9872 command_runner.go:130] ! Error from server (NotFound): serviceaccounts "default" not found
	I0408 19:52:25.012618    9872 ssh_runner.go:195] Run: sudo /var/lib/minikube/binaries/v1.32.2/kubectl get sa default --kubeconfig=/var/lib/minikube/kubeconfig
	I0408 19:52:25.188164    9872 command_runner.go:130] > NAME      SECRETS   AGE
	I0408 19:52:25.188164    9872 command_runner.go:130] > default   0         0s
	I0408 19:52:25.188403    9872 kubeadm.go:1113] duration metric: took 3.9145677s to wait for elevateKubeSystemPrivileges
	I0408 19:52:25.188521    9872 kubeadm.go:394] duration metric: took 19.320374s to StartCluster
	I0408 19:52:25.188593    9872 settings.go:142] acquiring lock: {Name:mke99fb8c09012609ce6804e7dfd4d68f5541df7 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.188928    9872 settings.go:150] Updating kubeconfig:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:25.191242    9872 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\kubeconfig: {Name:mk966a7640504e03827322930a51a762b5508893 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 19:52:25.192715    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml"
	I0408 19:52:25.192787    9872 start.go:235] Will wait 6m0s for node &{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}
	I0408 19:52:25.193041    9872 addons.go:511] enable addons start: toEnable=map[ambassador:false amd-gpu-device-plugin:false auto-pause:false cloud-spanner:false csi-hostpath-driver:false dashboard:false default-storageclass:true efk:false freshpod:false gcp-auth:false gvisor:false headlamp:false inaccel:false ingress:false ingress-dns:false inspektor-gadget:false istio:false istio-provisioner:false kong:false kubeflow:false kubevirt:false logviewer:false metallb:false metrics-server:false nvidia-device-plugin:false nvidia-driver-installer:false nvidia-gpu-device-plugin:false olm:false pod-security-policy:false portainer:false registry:false registry-aliases:false registry-creds:false storage-provisioner:true storage-provisioner-gluster:false storage-provisioner-rancher:false volcano:false volumesnapshots:false yakd:false]
	I0408 19:52:25.193184    9872 addons.go:69] Setting storage-provisioner=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 addons.go:238] Setting addon storage-provisioner=true in "multinode-095200"
	I0408 19:52:25.193184    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:25.193184    9872 addons.go:69] Setting default-storageclass=true in profile "multinode-095200"
	I0408 19:52:25.193184    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:25.193184    9872 addons_storage_classes.go:33] enableOrDisableStorageClasses default-storageclass=true on "multinode-095200"
	I0408 19:52:25.194651    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.195258    9872 out.go:177] * Verifying Kubernetes components...
	I0408 19:52:25.195383    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:25.213493    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:52:25.510742    9872 command_runner.go:130] > apiVersion: v1
	I0408 19:52:25.510742    9872 command_runner.go:130] > data:
	I0408 19:52:25.510901    9872 command_runner.go:130] >   Corefile: |
	I0408 19:52:25.510901    9872 command_runner.go:130] >     .:53 {
	I0408 19:52:25.510901    9872 command_runner.go:130] >         errors
	I0408 19:52:25.510901    9872 command_runner.go:130] >         health {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            lameduck 5s
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         ready
	I0408 19:52:25.510901    9872 command_runner.go:130] >         kubernetes cluster.local in-addr.arpa ip6.arpa {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            pods insecure
	I0408 19:52:25.510901    9872 command_runner.go:130] >            fallthrough in-addr.arpa ip6.arpa
	I0408 19:52:25.510901    9872 command_runner.go:130] >            ttl 30
	I0408 19:52:25.510901    9872 command_runner.go:130] >         }
	I0408 19:52:25.510901    9872 command_runner.go:130] >         prometheus :9153
	I0408 19:52:25.510901    9872 command_runner.go:130] >         forward . /etc/resolv.conf {
	I0408 19:52:25.510901    9872 command_runner.go:130] >            max_concurrent 1000
	I0408 19:52:25.511029    9872 command_runner.go:130] >         }
	I0408 19:52:25.511089    9872 command_runner.go:130] >         cache 30 {
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable success cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >            disable denial cluster.local
	I0408 19:52:25.511132    9872 command_runner.go:130] >         }
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loop
	I0408 19:52:25.511132    9872 command_runner.go:130] >         reload
	I0408 19:52:25.511132    9872 command_runner.go:130] >         loadbalance
	I0408 19:52:25.511132    9872 command_runner.go:130] >     }
	I0408 19:52:25.511132    9872 command_runner.go:130] > kind: ConfigMap
	I0408 19:52:25.511132    9872 command_runner.go:130] > metadata:
	I0408 19:52:25.511234    9872 command_runner.go:130] >   creationTimestamp: "2025-04-08T19:52:19Z"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   name: coredns
	I0408 19:52:25.511234    9872 command_runner.go:130] >   namespace: kube-system
	I0408 19:52:25.511234    9872 command_runner.go:130] >   resourceVersion: "224"
	I0408 19:52:25.511234    9872 command_runner.go:130] >   uid: a0e1b4d3-9085-4e51-8233-fba9aaae1871
	I0408 19:52:25.511484    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig -n kube-system get configmap coredns -o yaml | sed -e '/^        forward . \/etc\/resolv.conf.*/i \        hosts {\n           172.22.32.1 host.minikube.internal\n           fallthrough\n        }' -e '/^        errors *$/i \        log' | sudo /var/lib/minikube/binaries/v1.32.2/kubectl --kubeconfig=/var/lib/minikube/kubeconfig replace -f -"
	I0408 19:52:25.670547    9872 ssh_runner.go:195] Run: sudo systemctl start kubelet
	I0408 19:52:26.177191    9872 command_runner.go:130] > configmap/coredns replaced
	I0408 19:52:26.177324    9872 start.go:971] {"host.minikube.internal": 172.22.32.1} host record injected into CoreDNS's ConfigMap
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.178594    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:26.179156    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.179483    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:26.181377    9872 cert_rotation.go:140] Starting client certificate rotation controller
	I0408 19:52:26.181738    9872 envvar.go:172] "Feature gate default state" feature="WatchListClient" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsAllowCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="ClientsPreferCBOR" enabled=false
	I0408 19:52:26.181812    9872 envvar.go:172] "Feature gate default state" feature="InformerResourceVersion" enabled=false
	I0408 19:52:26.182441    9872 node_ready.go:35] waiting up to 6m0s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:26.182551    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.182682    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.182808    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182808    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.182942    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.182942    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.182808    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.182942    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.211553    9872 round_trippers.go:581] Response Status: 200 OK in 28 milliseconds
	I0408 19:52:26.211553    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Audit-Id: 54192402-5f11-4374-8cba-57131e79787e
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.211553    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.211553    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.212325    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:26.215317    9872 round_trippers.go:581] Response Status: 200 OK in 32 milliseconds
	I0408 19:52:26.215438    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Audit-Id: 3f5d4ad1-7b4f-4388-bc74-8376b19f7f37
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.215438    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.215438    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.215543    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.215543    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.215611    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 02 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.215980    9872 deployment.go:111] "Request Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 30 38 00  42 08 08 f3 fc d5 bf 06  |82.3408.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.216124    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.216192    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.216242    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.216291    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.216291    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.240196    9872 round_trippers.go:581] Response Status: 200 OK in 23 milliseconds
	I0408 19:52:26.240196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Audit-Id: 51e1c9ef-c0b6-4fa7-ab17-f3c35ca1daaf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.240196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.240196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.240962    9872 deployment.go:111] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 34 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3448.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 02 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.682895    9872 deployment.go:95] "Request Body" body=""
	I0408 19:52:26.682895    9872 type.go:168] "Request Body" body=""
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/apps/v1/namespaces/kube-system/deployments/coredns/scale
	I0408 19:52:26.682895    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.682895    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:26.682895    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:26.687612    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:26.687612    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Length: 144
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Audit-Id: 68f2dafe-f0a5-48fa-b98c-a4bfb21a347f
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.687612    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.687612    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.687612    9872 deployment.go:95] "Response Body" body=<
		00000000  6b 38 73 00 0a 17 0a 0e  61 75 74 6f 73 63 61 6c  |k8s.....autoscal|
		00000010  69 6e 67 2f 76 31 12 05  53 63 61 6c 65 12 6d 0a  |ing/v1..Scale.m.|
		00000020  51 0a 07 63 6f 72 65 64  6e 73 12 00 1a 0b 6b 75  |Q..coredns....ku|
		00000030  62 65 2d 73 79 73 74 65  6d 22 00 2a 24 30 61 37  |be-system".*$0a7|
		00000040  36 37 33 32 36 2d 65 35  62 61 2d 34 36 35 36 2d  |67326-e5ba-4656-|
		00000050  38 64 37 35 2d 38 63 37  39 61 32 38 34 65 36 33  |8d75-8c79a284e63|
		00000060  38 32 03 33 35 34 38 00  42 08 08 f3 fc d5 bf 06  |82.3548.B.......|
		00000070  10 00 12 02 08 01 1a 14  08 01 12 10 6b 38 73 2d  |............k8s-|
		00000080  61 70 70 3d 6b 75 62 65  2d 64 6e 73 1a 00 22 00  |app=kube-dns..".|
	 >
	I0408 19:52:26.687612    9872 kapi.go:214] "coredns" deployment in "kube-system" namespace and "multinode-095200" context rescaled to 1 replicas
	I0408 19:52:26.688226    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:26.688386    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:26.688386    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:26 GMT
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Audit-Id: fd2205b3-9af0-48b4-a923-7cb6d75b3b7a
	I0408 19:52:26.688386    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:26.688497    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:26.688900    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.183676    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.183676    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.183676    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.183676    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.183676    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.188594    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:27.188675    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.188675    9872 round_trippers.go:587]     Audit-Id: 407c9471-0584-4344-b935-368f95a85e26
	I0408 19:52:27.188764    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.188810    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.188810    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.189290    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:27.636758    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.637188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:27.639569    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:27.639885    9872 out.go:177]   - Using image gcr.io/k8s-minikube/storage-provisioner:v5
	I0408 19:52:27.640899    9872 loader.go:402] Config loaded from file:  C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 19:52:27.641560    9872 kapi.go:59] client config for multinode-095200: &rest.Config{Host:"https://172.22.37.202:8443", APIPath:"", ContentConfig:rest.ContentConfig{AcceptContentTypes:"", ContentType:"", GroupVersion:(*schema.GroupVersion)(nil), NegotiatedSerializer:runtime.NegotiatedSerializer(nil)}, Username:"", Password:"", BearerToken:"", BearerTokenFile:"", Impersonate:rest.ImpersonationConfig{UserName:"", UID:"", Groups:[]string(nil), Extra:map[string][]string(nil)}, AuthProvider:<nil>, AuthConfigPersister:rest.AuthProviderConfigPersister(nil), ExecProvider:<nil>, TLSClientConfig:rest.sanitizedTLSClientConfig{Insecure:false, ServerName:"", CertFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.crt", KeyFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\multinode-095200\\client.key", CAFile:"C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\ca.crt", CertData:[]uint8(nil), KeyData:[]uint8(nil), CAD
ata:[]uint8(nil), NextProtos:[]string(nil)}, UserAgent:"", DisableCompression:false, Transport:http.RoundTripper(nil), WrapTransport:(transport.WrapperFunc)(0x2ff92e0), QPS:0, Burst:0, RateLimiter:flowcontrol.RateLimiter(nil), WarningHandler:rest.WarningHandler(nil), Timeout:0, Dial:(func(context.Context, string, string) (net.Conn, error))(nil), Proxy:(func(*http.Request) (*url.URL, error))(nil)}
	I0408 19:52:27.642240    9872 addons.go:238] Setting addon default-storageclass=true in "multinode-095200"
	I0408 19:52:27.642240    9872 host.go:66] Checking if "multinode-095200" exists ...
	I0408 19:52:27.643481    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.644249    9872 addons.go:435] installing /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:27.644249    9872 ssh_runner.go:362] scp memory --> /etc/kubernetes/addons/storage-provisioner.yaml (2676 bytes)
	I0408 19:52:27.644249    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:27.682956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:27.682956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:27.682956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:27.682956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:27.682956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:27.687243    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:27.687243    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:27.687243    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:27 GMT
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Audit-Id: 5ad74de0-b774-4474-9839-e5dc4f93760a
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:27.687243    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:27.687243    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.183346    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.183346    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.183346    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.183346    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.183346    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.189912    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:28.189912    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Audit-Id: 1b4cab24-4179-4ca9-8e1c-bc3b86feea53
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.189912    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.189912    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.190420    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:28.190612    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:28.682768    9872 type.go:168] "Request Body" body=""
	I0408 19:52:28.683147    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:28.683147    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:28.683147    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:28.683147    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:28.686587    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:28.686630    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:28.686630    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:28 GMT
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Audit-Id: 4c2976e8-7802-426c-962c-6c86331b69d9
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:28.686630    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:28.686884    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.182917    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.182917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.182917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.182917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.182917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.187958    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:29.188052    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.188052    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Audit-Id: f991c04b-7a3a-4d46-890c-c3287d37e8cd
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.188052    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.188052    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:29.684691    9872 type.go:168] "Request Body" body=""
	I0408 19:52:29.685069    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:29.685158    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:29.685158    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:29.685214    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:29.705173    9872 round_trippers.go:581] Response Status: 200 OK in 19 milliseconds
	I0408 19:52:29.705248    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:29.705248    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:29.705330    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:29 GMT
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Audit-Id: 04e83bbf-b180-4b89-8265-a75263ad557b
	I0408 19:52:29.705330    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:29.705389    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:29.706648    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.183242    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.183242    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.183242    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.183242    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.183242    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.194652    9872 round_trippers.go:581] Response Status: 200 OK in 11 milliseconds
	I0408 19:52:30.194652    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.194652    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Audit-Id: 25f9b8ab-df0e-428c-9a51-201d036cec45
	I0408 19:52:30.194652    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.194652    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:30.195271    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.220251    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:30.491258    9872 addons.go:435] installing /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:30.491258    9872 ssh_runner.go:362] scp storageclass/storageclass.yaml --> /etc/kubernetes/addons/storageclass.yaml (271 bytes)
	I0408 19:52:30.491258    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200 ).state
	I0408 19:52:30.683365    9872 type.go:168] "Request Body" body=""
	I0408 19:52:30.683365    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:30.683365    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:30.683365    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:30.683365    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:30.688414    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:30.688414    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:30.688414    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:30 GMT
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Audit-Id: db3bb30a-ec54-4fb4-bce1-aa0d8917b070
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:30.688596    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:30.688596    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:30.688872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.183216    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.183216    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.183216    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.183216    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.183216    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.186674    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.186813    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.186813    9872 round_trippers.go:587]     Audit-Id: 39de4011-e5a0-45ee-bf2e-53f91799470e
	I0408 19:52:31.186898    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.187231    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:31.683374    9872 type.go:168] "Request Body" body=""
	I0408 19:52:31.683374    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:31.683374    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:31.683374    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:31.683374    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:31.687557    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:31.687557    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Audit-Id: ab0082d6-550d-4b5b-a0d0-7320d4c8e138
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:31.687615    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:31.687615    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:31 GMT
	I0408 19:52:31.688075    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.183081    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.183081    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.183081    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.183081    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.183081    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.187196    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:32.187196    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Audit-Id: 7ee42cf8-f19d-40e3-b644-d4735ca35039
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.187196    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.187196    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.187196    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.683023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:32.683023    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:32.683023    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:32.683023    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:32.683023    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:32.686384    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:32.686384    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:32.686457    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:32 GMT
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Audit-Id: e6227103-24c8-43b2-a8a5-de28b51d8b40
	I0408 19:52:32.686457    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:32.686868    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:32.687142    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:32.894718    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200 ).networkadapters[0]).ipaddresses[0]
	I0408 19:52:33.083456    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:33.084031    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:33.084327    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:33.182759    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.182759    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.182759    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.182759    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.182759    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.186015    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.186813    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Audit-Id: 685df91c-fa19-4a2a-8a9a-726bf33ecd8d
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.186869    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.186869    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.187180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.222347    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storage-provisioner.yaml
	I0408 19:52:33.683358    9872 type.go:168] "Request Body" body=""
	I0408 19:52:33.683358    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:33.683358    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:33.683358    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:33.683358    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:33.687733    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:33.687802    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Audit-Id: 1be982d6-c0b7-4fcb-a9eb-c2cc9c76f69e
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:33.687802    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:33.687802    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:33 GMT
	I0408 19:52:33.688222    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:33.837930    9872 command_runner.go:130] > serviceaccount/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > clusterrolebinding.rbac.authorization.k8s.io/storage-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > role.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.837930    9872 command_runner.go:130] > rolebinding.rbac.authorization.k8s.io/system:persistent-volume-provisioner created
	I0408 19:52:33.838058    9872 command_runner.go:130] > endpoints/k8s.io-minikube-hostpath created
	I0408 19:52:33.838058    9872 command_runner.go:130] > pod/storage-provisioner created
	I0408 19:52:34.183038    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.183038    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.183038    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.183038    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.183038    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.188141    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:34.188141    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Audit-Id: 81526468-0d6f-441d-80aa-278bb1a3044b
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.188141    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.188141    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.189300    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.682805    9872 type.go:168] "Request Body" body=""
	I0408 19:52:34.682805    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:34.682805    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:34.682805    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:34.682805    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:34.687300    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:34.687300    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Audit-Id: cb94b032-b7e6-4351-a540-d1c5a8f4e071
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:34.687300    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:34.687300    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:34 GMT
	I0408 19:52:34.687788    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:34.687907    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:35.183130    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.183130    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.183130    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.183130    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.183130    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.186231    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:35.186747    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Audit-Id: 4a446a89-7f50-4bc2-a302-4a30314b219a
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.186747    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.186747    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.187590    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stdout =====>] : 172.22.37.202
	
	I0408 19:52:35.554880    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:35.555861    9872 sshutil.go:53] new ssh client: &{IP:172.22.37.202 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200\id_rsa Username:docker}
	I0408 19:52:35.682665    9872 type.go:168] "Request Body" body=""
	I0408 19:52:35.682665    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:35.682665    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.682665    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.682665    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.686998    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:35.687056    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Audit-Id: acb68525-f81c-4d5b-bd5e-6196e6feddcd
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.687056    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.687056    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.687131    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.687722    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:35.701543    9872 ssh_runner.go:195] Run: sudo KUBECONFIG=/var/lib/minikube/kubeconfig /var/lib/minikube/binaries/v1.32.2/kubectl apply -f /etc/kubernetes/addons/storageclass.yaml
	I0408 19:52:35.898260    9872 command_runner.go:130] > storageclass.storage.k8s.io/standard created
	I0408 19:52:35.898672    9872 type.go:204] "Request Body" body=""
	I0408 19:52:35.898829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses
	I0408 19:52:35.898915    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.898915    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.898915    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.905500    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:35.905500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.905500    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Content-Length: 957
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.905500    9872 round_trippers.go:587]     Audit-Id: 8c45acb3-30e7-4e72-af67-6f688092ffe6
	I0408 19:52:35.905500    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 25 0a 11  73 74 6f 72 61 67 65 2e  |k8s..%..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 10 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 4c  69 73 74 12 8b 07 0a 09  |geClassList.....|
		00000030  0a 00 12 03 33 38 31 1a  00 12 fd 06 0a cd 06 0a  |....381.........|
		00000040  08 73 74 61 6e 64 61 72  64 12 00 1a 00 22 00 2a  |.standard....".*|
		00000050  24 30 31 33 65 35 63 38  33 2d 34 39 32 31 2d 34  |$013e5c83-4921-4|
		00000060  33 64 30 2d 39 61 63 37  2d 34 38 31 30 37 36 32  |3d0-9ac7-4810762|
		00000070  63 35 61 35 63 32 03 33  38 31 38 00 42 08 08 83  |c5a5c2.3818.B...|
		00000080  fd d5 bf 06 10 00 5a 2f  0a 1f 61 64 64 6f 6e 6d  |......Z/..addonm|
		00000090  61 6e 61 67 65 72 2e 6b  75 62 65 72 6e 65 74 65  |anager.kubernete|
		000000a0  73 2e 69 6f 2f 6d 6f 64  65 12 0c 45 6e 73 75 72  |s.io/mode..Ensur|
		000000b0  65 45 78 69 73 74 73 62  b7 02 0a 30 6b 75 62 65  |eExistsb...0kube|
		000000c0  63 74 6c 2e 6b 75 62 65  72 6e 65 74 65 73 2e 69  |ctl.kubernetes. [truncated 3713 chars]
	 >
	I0408 19:52:35.906261    9872 type.go:267] "Request Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.906312    9872 round_trippers.go:470] PUT https://172.22.37.202:8443/apis/storage.k8s.io/v1/storageclasses/standard
	I0408 19:52:35.906312    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.906312    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:35.906312    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:35.917615    9872 round_trippers.go:581] Response Status: 200 OK in 10 milliseconds
	I0408 19:52:35.917615    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:35.917680    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Content-Length: 939
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:35 GMT
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Audit-Id: 7e9d4d23-ba7c-4a5c-b9e2-f8fa77b5c6b3
	I0408 19:52:35.917680    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:35.917680    9872 type.go:267] "Response Body" body=<
		00000000  6b 38 73 00 0a 21 0a 11  73 74 6f 72 61 67 65 2e  |k8s..!..storage.|
		00000010  6b 38 73 2e 69 6f 2f 76  31 12 0c 53 74 6f 72 61  |k8s.io/v1..Stora|
		00000020  67 65 43 6c 61 73 73 12  fd 06 0a cd 06 0a 08 73  |geClass........s|
		00000030  74 61 6e 64 61 72 64 12  00 1a 00 22 00 2a 24 30  |tandard....".*$0|
		00000040  31 33 65 35 63 38 33 2d  34 39 32 31 2d 34 33 64  |13e5c83-4921-43d|
		00000050  30 2d 39 61 63 37 2d 34  38 31 30 37 36 32 63 35  |0-9ac7-4810762c5|
		00000060  61 35 63 32 03 33 38 31  38 00 42 08 08 83 fd d5  |a5c2.3818.B.....|
		00000070  bf 06 10 00 5a 2f 0a 1f  61 64 64 6f 6e 6d 61 6e  |....Z/..addonman|
		00000080  61 67 65 72 2e 6b 75 62  65 72 6e 65 74 65 73 2e  |ager.kubernetes.|
		00000090  69 6f 2f 6d 6f 64 65 12  0c 45 6e 73 75 72 65 45  |io/mode..EnsureE|
		000000a0  78 69 73 74 73 62 b7 02  0a 30 6b 75 62 65 63 74  |xistsb...0kubect|
		000000b0  6c 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |l.kubernetes.io/|
		000000c0  6c 61 73 74 2d 61 70 70  6c 69 65 64 2d 63 6f 6e  |last-applied-co [truncated 3632 chars]
	 >
	I0408 19:52:35.921256    9872 out.go:177] * Enabled addons: storage-provisioner, default-storageclass
	I0408 19:52:35.924712    9872 addons.go:514] duration metric: took 10.7316569s for enable addons: enabled=[storage-provisioner default-storageclass]
	I0408 19:52:36.182600    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.182600    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.182600    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.182600    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.182600    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.187832    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:36.187832    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Audit-Id: b766a625-37d8-4b01-b586-ae309c408ca4
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.187832    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.187832    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.187832    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.683162    9872 type.go:168] "Request Body" body=""
	I0408 19:52:36.683162    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:36.683162    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:36.683162    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:36.683162    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:36.687176    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:36.687176    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:36.687176    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:36 GMT
	I0408 19:52:36.687176    9872 round_trippers.go:587]     Audit-Id: 3402846c-6af9-4c16-a765-fa78b2a1dfb6
	I0408 19:52:36.688168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:36.688168    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:37.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.187433    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.187433    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.187433    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Audit-Id: 09189e65-0628-45b8-a6ef-abc67f812734
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.187433    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.187433    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:37.683192    9872 type.go:168] "Request Body" body=""
	I0408 19:52:37.683348    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:37.683348    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:37.683558    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:37.683558    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:37.688384    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:37.688519    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Audit-Id: cc629cdf-9f9f-47a1-b512-ac0ed45ba62f
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:37.688519    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:37.688519    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:37 GMT
	I0408 19:52:37.689042    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.183873    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.183977    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.183977    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.184063    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.184063    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.188119    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.188119    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Audit-Id: 6697e232-1362-4d20-81ab-a14590db1998
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.188119    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.188119    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.188628    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:38.682605    9872 type.go:168] "Request Body" body=""
	I0408 19:52:38.682605    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:38.682605    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:38.682605    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:38.682605    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:38.687050    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:38.687127    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:38.687127    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:38 GMT
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Audit-Id: e1050664-a6dc-4a02-8ad0-4801f6b4e382
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:38.687127    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:38.687127    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.184023    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.184126    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.184126    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.184126    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.184210    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.190432    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.190592    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.190592    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Audit-Id: c019c77a-f3c7-4331-b3b0-be9357309243
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.190592    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.190794    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:39.190794    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:39.684634    9872 type.go:168] "Request Body" body=""
	I0408 19:52:39.684768    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:39.684835    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:39.684835    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:39.684835    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:39.691227    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:39.691271    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:39.691347    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:39.691347    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:39 GMT
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Audit-Id: cc19f481-63e6-4b66-a135-c53a85419a5c
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:39.691440    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:39.693012    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.182889    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.182889    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.182889    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.182889    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.182889    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.188161    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:40.188161    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.188161    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Audit-Id: fa852164-d0ec-42c9-a38d-01b7c2ff29b6
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.188161    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.188765    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:40.682822    9872 type.go:168] "Request Body" body=""
	I0408 19:52:40.682822    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:40.682822    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:40.682822    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:40.682822    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:40.687205    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:40.687304    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:40.687304    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:40 GMT
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Audit-Id: 06bb383f-1b06-4cbd-b98f-b8f2b60d9cb9
	I0408 19:52:40.687304    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:40.687417    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:40.687417    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:40.687851    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.183297    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.183297    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.183297    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.183297    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.183297    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.187447    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.187447    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Audit-Id: 922ac887-ca77-4280-8aab-f5013548b0b9
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.187447    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.187447    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.187995    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.683053    9872 type.go:168] "Request Body" body=""
	I0408 19:52:41.683053    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:41.683053    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:41.683053    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:41.683053    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:41.687500    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:41.687500    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:41.687500    9872 round_trippers.go:587]     Audit-Id: 9cc8da6a-cdea-4c7d-b8f4-1687f1010710
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:41.687614    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:41.687658    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:41.687694    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:41 GMT
	I0408 19:52:41.688478    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:41.688764    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:42.183191    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.183191    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.183191    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.183191    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.183191    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.188654    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:42.188654    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Audit-Id: f2673d2c-6eea-42f9-b6e0-cbc479c30386
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.188654    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.188654    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.189014    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:42.683611    9872 type.go:168] "Request Body" body=""
	I0408 19:52:42.683611    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:42.683611    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:42.683611    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:42.683611    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:42.688409    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:42.688409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:42.688409    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:42 GMT
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Audit-Id: bb5532b4-31be-4ed5-84a8-d49f84c96dca
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:42.688409    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:42.688409    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.182585    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.182585    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.182585    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.182585    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.182585    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.189673    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:43.189673    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.189673    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.189673    9872 round_trippers.go:587]     Audit-Id: 827b82d9-8e56-4889-832f-b1258d7cdc03
	I0408 19:52:43.190225    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.683569    9872 type.go:168] "Request Body" body=""
	I0408 19:52:43.683569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:43.683569    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:43.683569    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:43.683569    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:43.689225    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:43.689328    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Audit-Id: 2f5ba42c-a181-42a4-9d9c-75645acf5b44
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:43.689328    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:43.689328    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:43 GMT
	I0408 19:52:43.689729    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:43.690049    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:44.183457    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.183457    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.183457    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.183457    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.183457    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.188184    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.188184    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Audit-Id: 475254e6-fcc1-4bba-a45e-a7bc99798a56
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.188184    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.188184    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.189439    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:44.684126    9872 type.go:168] "Request Body" body=""
	I0408 19:52:44.684367    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:44.684367    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:44.684367    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:44.684839    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:44.689759    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:44.689759    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:44.689759    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:44.689759    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:44.690302    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:44 GMT
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Audit-Id: 230c3882-b24f-449c-89d6-edfa5af13052
	I0408 19:52:44.690326    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:44.690955    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.182965    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.182965    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.182965    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.182965    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.182965    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.186677    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:45.186677    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Audit-Id: 72c5fc97-866c-4bb0-bb89-774ddc86ddae
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.186677    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.186677    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.189810    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.683712    9872 type.go:168] "Request Body" body=""
	I0408 19:52:45.683917    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:45.683917    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:45.683917    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:45.683917    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:45.690276    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:45.690276    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:45.690276    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:45 GMT
	I0408 19:52:45.690276    9872 round_trippers.go:587]     Audit-Id: 70b04249-4b04-4849-9f00-5c5984c87d48
	I0408 19:52:45.690829    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:45.690960    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:46.182956    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.182956    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.182956    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.182956    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.182956    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.188344    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:46.188409    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.188409    9872 round_trippers.go:587]     Audit-Id: c2aaeaa6-5bfa-499f-80fd-3f73220f122f
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.188466    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.188466    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.188522    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.188578    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:46.682976    9872 type.go:168] "Request Body" body=""
	I0408 19:52:46.683567    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:46.683567    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:46.683567    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:46.683567    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:46.687411    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:46.687411    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Audit-Id: 82d735a2-cce0-4376-911a-1fa6144a8df6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:46.687411    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:46.687411    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:46 GMT
	I0408 19:52:46.687809    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.183402    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.183402    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.183402    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.183402    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.183402    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.187802    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:47.187941    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Audit-Id: 7a838f4a-2004-4862-94a9-a3fcc2be0cf4
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.188021    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.188021    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.188080    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.188405    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:47.683256    9872 type.go:168] "Request Body" body=""
	I0408 19:52:47.683256    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:47.683256    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:47.683817    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:47.683817    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:47.688981    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:47.688981    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Audit-Id: b48b8e95-da31-4bd5-aaf9-c5a282493a0e
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:47.688981    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:47.688981    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:47 GMT
	I0408 19:52:47.688981    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.183351    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.183351    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.183351    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.183351    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.183351    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.188122    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.188122    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Audit-Id: 1644814b-1d08-4d1f-8281-a92ed259fd58
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.188122    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.188122    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.188362    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 bb 22 0a c6 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 30  35 38 00 42 08 08 f0 fc  |fba22.3058.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20924 chars]
	 >
	I0408 19:52:48.188362    9872 node_ready.go:53] node "multinode-095200" has status "Ready":"False"
	I0408 19:52:48.683635    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.683635    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.683635    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.683635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.683635    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.687776    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:48.688368    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Audit-Id: 79352c14-e4a5-4c93-8c7b-33c7b6f857f8
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.688368    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.688368    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.689180    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:48.689384    9872 node_ready.go:49] node "multinode-095200" has status "Ready":"True"
	I0408 19:52:48.689440    9872 node_ready.go:38] duration metric: took 22.506825s for node "multinode-095200" to be "Ready" ...
	I0408 19:52:48.689440    9872 pod_ready.go:36] extra waiting up to 6m0s for all system-critical pods including labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:48.689611    9872 type.go:204] "Request Body" body=""
	I0408 19:52:48.689705    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:48.689733    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.689733    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.689733    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.698535    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:48.698632    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Audit-Id: ff6550e3-9659-433b-aee2-b28b42988450
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.698632    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.698632    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.700253    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 ff c5 02 0a  09 0a 00 12 03 33 39 37  |ist..........397|
		00000020  1a 00 12 d7 26 0a 8b 19  0a 18 63 6f 72 65 64 6e  |....&.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 33 39  |075ba936e08e2.39|
		00000090  37 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |78.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205160 chars]
	 >
	I0408 19:52:48.700985    9872 pod_ready.go:79] waiting up to 6m0s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:48.700985    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.700985    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:48.700985    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.700985    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.700985    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.703795    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:48.704293    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Audit-Id: 5a96a7e1-3820-4c3e-bcb3-5b555c8a5b79
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.704293    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.704293    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.704293    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:48.704293    9872 type.go:168] "Request Body" body=""
	I0408 19:52:48.704293    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:48.704293    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:48.704293    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:48.704293    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:48.712073    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:48.712073    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:48 GMT
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Audit-Id: a010fece-d26c-4779-888f-6cc8a0b8ce2e
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:48.712073    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:48.712073    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:48.712550    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.201085    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.201085    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.201085    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.201085    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.201085    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.206710    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.206792    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.206792    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.206792    9872 round_trippers.go:587]     Audit-Id: 5dd0fa75-00be-4a7a-b8f9-d49a398d8143
	I0408 19:52:49.207140    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.207397    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.207461    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.207461    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.207547    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.207547    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.214197    9872 round_trippers.go:581] Response Status: 200 OK in 6 milliseconds
	I0408 19:52:49.214197    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Audit-Id: af0a36c1-fb0e-4b8f-8548-c3c2bb5be739
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.214197    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.214197    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.214837    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:49.701274    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.701274    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:49.701274    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.701274    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.701274    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.717563    9872 round_trippers.go:581] Response Status: 200 OK in 16 milliseconds
	I0408 19:52:49.717674    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Audit-Id: 786182b2-3539-4258-b579-0e7cc5ab726b
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.717744    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.717744    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.718128    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:49.718498    9872 type.go:168] "Request Body" body=""
	I0408 19:52:49.718569    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:49.718619    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:49.718635    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:49.718658    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:49.723872    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:49.723872    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Audit-Id: bcda245e-3a8d-4f59-826a-aababc3b183a
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:49.723872    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:49.723872    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:49 GMT
	I0408 19:52:49.723872    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.201972    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.201972    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.201972    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.201972    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.201972    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.206302    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.206962    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Audit-Id: d259992f-afa2-4582-b900-37ed9f001187
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.206962    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.206962    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.207414    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.207566    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.207566    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.207566    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.207566    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.207566    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.211455    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.211455    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.211455    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Audit-Id: 8b7aeb10-cb82-4c5e-9a90-52fcce177355
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.211455    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.212910    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.701797    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.701797    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:50.701797    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.701797    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.701797    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.706488    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:50.706488    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Audit-Id: 68786f41-9234-43c0-abac-f11c260ec5b4
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.706488    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.706488    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.706488    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d7 26 0a 8b 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.&.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 33 39 37 38 00  |ba936e08e2.3978.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 23542 chars]
	 >
	I0408 19:52:50.707157    9872 type.go:168] "Request Body" body=""
	I0408 19:52:50.707157    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:50.707157    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:50.707157    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:50.707157    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:50.710818    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:50.711075    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:50 GMT
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Audit-Id: 820fd954-b74e-4355-a560-456f3dfebd7c
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:50.711075    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:50.711075    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:50.711302    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 c2 21 0a fc 10 0a 10  6d 75 6c 74 69 6e 6f 64  |..!.....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 33 39  31 38 00 42 08 08 f0 fc  |fba22.3918.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 20299 chars]
	 >
	I0408 19:52:50.711540    9872 pod_ready.go:103] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"False"
	I0408 19:52:51.201714    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.202195    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/coredns-668d6bf9bc-4tn68
	I0408 19:52:51.202195    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.202195    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.202195    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.207490    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.207565    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Audit-Id: 7eee72f5-f5e5-4fe2-b1da-16ca2c975eb1
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.207565    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.207565    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.207627    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.207878    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  d0 27 0a ae 19 0a 18 63  6f 72 65 64 6e 73 2d 36  |.'.....coredns-6|
		00000020  36 38 64 36 62 66 39 62  63 2d 34 74 6e 36 38 12  |68d6bf9bc-4tn68.|
		00000030  13 63 6f 72 65 64 6e 73  2d 36 36 38 64 36 62 66  |.coredns-668d6bf|
		00000040  39 62 63 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |9bc-..kube-syste|
		00000050  6d 22 00 2a 24 37 63 36  65 34 61 62 63 2d 35 65  |m".*$7c6e4abc-5e|
		00000060  34 31 2d 34 33 39 36 2d  38 38 38 36 2d 30 37 35  |41-4396-8886-075|
		00000070  62 61 39 33 36 65 30 38  65 32 03 34 31 33 38 00  |ba936e08e2.4138.|
		00000080  42 08 08 f9 fc d5 bf 06  10 00 5a 13 0a 07 6b 38  |B.........Z...k8|
		00000090  73 2d 61 70 70 12 08 6b  75 62 65 2d 64 6e 73 5a  |s-app..kube-dnsZ|
		000000a0  1f 0a 11 70 6f 64 2d 74  65 6d 70 6c 61 74 65 2d  |...pod-template-|
		000000b0  68 61 73 68 12 0a 36 36  38 64 36 62 66 39 62 63  |hash..668d6bf9bc|
		000000c0  6a 53 0a 0a 52 65 70 6c  69 63 61 53 65 74 1a 12  |jS..ReplicaSet. [truncated 24167 chars]
	 >
	I0408 19:52:51.208181    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.208239    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.208239    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.208239    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.208298    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.210843    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.211761    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Audit-Id: 7a19c77d-fb90-474f-a800-6c4fa4257738
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.211761    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.211761    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.212073    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.212201    9872 pod_ready.go:93] pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.212224    9872 pod_ready.go:82] duration metric: took 2.5112192s for pod "coredns-668d6bf9bc-4tn68" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 pod_ready.go:79] waiting up to 6m0s for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.212252    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.212252    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/etcd-multinode-095200
	I0408 19:52:51.212252    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.212252    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.212252    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.217482    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.217482    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Audit-Id: 732c6cf5-66e8-4016-a3d7-6605f9fc7bd9
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.217482    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.217482    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.218135    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  a0 2b 0a 9c 1a 0a 15 65  74 63 64 2d 6d 75 6c 74  |.+.....etcd-mult|
		00000020  69 6e 6f 64 65 2d 30 39  35 32 30 30 12 00 1a 0b  |inode-095200....|
		00000030  6b 75 62 65 2d 73 79 73  74 65 6d 22 00 2a 24 63  |kube-system".*$c|
		00000040  34 30 34 32 37 35 65 2d  65 32 65 39 2d 34 31 32  |404275e-e2e9-412|
		00000050  32 2d 38 37 64 38 2d 66  66 36 33 65 63 65 31 33  |2-87d8-ff63ece13|
		00000060  30 64 32 32 03 33 36 38  38 00 42 08 08 f2 fc d5  |0d22.3688.B.....|
		00000070  bf 06 10 00 5a 11 0a 09  63 6f 6d 70 6f 6e 65 6e  |....Z...componen|
		00000080  74 12 04 65 74 63 64 5a  15 0a 04 74 69 65 72 12  |t..etcdZ...tier.|
		00000090  0d 63 6f 6e 74 72 6f 6c  2d 70 6c 61 6e 65 62 4e  |.control-planebN|
		000000a0  0a 30 6b 75 62 65 61 64  6d 2e 6b 75 62 65 72 6e  |.0kubeadm.kubern|
		000000b0  65 74 65 73 2e 69 6f 2f  65 74 63 64 2e 61 64 76  |etes.io/etcd.adv|
		000000c0  65 72 74 69 73 65 2d 63  6c 69 65 6e 74 2d 75 72  |ertise-client-u [truncated 26458 chars]
	 >
	I0408 19:52:51.218135    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.218135    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.218135    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.218135    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.218135    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.220516    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.220516    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.220516    9872 round_trippers.go:587]     Audit-Id: f9998752-e161-45d8-a355-d0b8a3dcb812
	I0408 19:52:51.220516    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.220516    9872 pod_ready.go:93] pod "etcd-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.220516    9872 pod_ready.go:82] duration metric: took 8.2639ms for pod "etcd-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.220516    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.220516    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-apiserver-multinode-095200
	I0408 19:52:51.220516    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.220516    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.220516    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.223887    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Audit-Id: 02f95fe3-18dd-4df2-a05a-dc673ceb8267
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.223887    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.223887    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.223887    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  85 34 0a ac 1c 0a 1f 6b  75 62 65 2d 61 70 69 73  |.4.....kube-apis|
		00000020  65 72 76 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |erver-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 39 38 31 33 32 38 35  |ystem".*$9813285|
		00000050  31 2d 34 61 37 36 2d 34  63 64 39 2d 62 36 61 62  |1-4a76-4cd9-b6ab|
		00000060  2d 65 35 38 35 62 64 33  32 63 33 62 35 32 03 33  |-e585bd32c3b52.3|
		00000070  34 33 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |438.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 61 70 69 73 65 72  76 65 72 5a 15 0a 04 74  |e-apiserverZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 55 0a 3f 6b 75  62 65 61 64 6d 2e 6b 75  |nebU.?kubeadm.ku|
		000000c0  62 65 72 6e 65 74 65 73  2e 69 6f 2f 6b 75 62 65  |bernetes.io/kub [truncated 31993 chars]
	 >
	I0408 19:52:51.223887    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.223887    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.223887    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.223887    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.223887    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.227440    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.227823    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.227823    9872 round_trippers.go:587]     Audit-Id: 0786ffd7-7b8a-4d35-a62f-af5c0c2bb9d8
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.227873    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.227873    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.227906    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.227906    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.228470    9872 pod_ready.go:93] pod "kube-apiserver-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.228510    9872 pod_ready.go:82] duration metric: took 7.994ms for pod "kube-apiserver-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228510    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.228624    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.228691    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-controller-manager-multinode-095200
	I0408 19:52:51.228691    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.228691    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.228691    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.230949    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.231292    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.231331    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Audit-Id: a8db91c3-54c9-4107-912a-a950c55551ed
	I0408 19:52:51.231331    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.231386    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.231807    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  eb 30 0a 99 1d 0a 28 6b  75 62 65 2d 63 6f 6e 74  |.0....(kube-cont|
		00000020  72 6f 6c 6c 65 72 2d 6d  61 6e 61 67 65 72 2d 6d  |roller-manager-m|
		00000030  75 6c 74 69 6e 6f 64 65  2d 30 39 35 32 30 30 12  |ultinode-095200.|
		00000040  00 1a 0b 6b 75 62 65 2d  73 79 73 74 65 6d 22 00  |...kube-system".|
		00000050  2a 24 64 30 38 33 30 63  38 39 2d 30 34 34 39 2d  |*$d0830c89-0449-|
		00000060  34 36 31 65 2d 39 64 32  32 2d 63 36 33 33 66 38  |461e-9d22-c633f8|
		00000070  33 31 34 37 33 61 32 03  33 31 39 38 00 42 08 08  |31473a2.3198.B..|
		00000080  f4 fc d5 bf 06 10 00 5a  24 0a 09 63 6f 6d 70 6f  |.......Z$..compo|
		00000090  6e 65 6e 74 12 17 6b 75  62 65 2d 63 6f 6e 74 72  |nent..kube-contr|
		000000a0  6f 6c 6c 65 72 2d 6d 61  6e 61 67 65 72 5a 15 0a  |oller-managerZ..|
		000000b0  04 74 69 65 72 12 0d 63  6f 6e 74 72 6f 6c 2d 70  |.tier..control-p|
		000000c0  6c 61 6e 65 62 3d 0a 19  6b 75 62 65 72 6e 65 74  |laneb=..kuberne [truncated 30008 chars]
	 >
	I0408 19:52:51.232002    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.232002    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.232002    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.232002    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.232002    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.235184    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.235228    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.235277    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Audit-Id: ad044c97-dfaa-452d-af8e-fc6c7e3e761f
	I0408 19:52:51.235277    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.236341    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.236512    9872 pod_ready.go:93] pod "kube-controller-manager-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.236561    9872 pod_ready.go:82] duration metric: took 8.051ms for pod "kube-controller-manager-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236594    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.236678    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.236755    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-proxy-4ntql
	I0408 19:52:51.236801    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.236834    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.236834    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.242251    9872 round_trippers.go:581] Response Status: 200 OK in 5 milliseconds
	I0408 19:52:51.242251    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Audit-Id: 7701521f-54b5-41a6-a3a6-13bbc2523c38
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.242251    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.242251    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.242875    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  9d 25 0a bf 14 0a 10 6b  75 62 65 2d 70 72 6f 78  |.%.....kube-prox|
		00000020  79 2d 34 6e 74 71 6c 12  0b 6b 75 62 65 2d 70 72  |y-4ntql..kube-pr|
		00000030  6f 78 79 2d 1a 0b 6b 75  62 65 2d 73 79 73 74 65  |oxy-..kube-syste|
		00000040  6d 22 00 2a 24 61 62 61  37 39 33 36 39 2d 32 30  |m".*$aba79369-20|
		00000050  34 64 2d 34 35 32 31 2d  61 62 38 64 2d 62 61 64  |4d-4521-ab8d-bad|
		00000060  61 64 34 30 64 36 38 38  65 32 03 33 36 32 38 00  |ad40d688e2.3628.|
		00000070  42 08 08 f9 fc d5 bf 06  10 00 5a 26 0a 18 63 6f  |B.........Z&..co|
		00000080  6e 74 72 6f 6c 6c 65 72  2d 72 65 76 69 73 69 6f  |ntroller-revisio|
		00000090  6e 2d 68 61 73 68 12 0a  37 62 62 38 34 63 34 39  |n-hash..7bb84c49|
		000000a0  38 34 5a 15 0a 07 6b 38  73 2d 61 70 70 12 0a 6b  |84Z...k8s-app..k|
		000000b0  75 62 65 2d 70 72 6f 78  79 5a 1c 0a 17 70 6f 64  |ube-proxyZ...pod|
		000000c0  2d 74 65 6d 70 6c 61 74  65 2d 67 65 6e 65 72 61  |-template-gener [truncated 22663 chars]
	 >
	I0408 19:52:51.242984    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.243127    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.243174    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.243174    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.243174    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.245486    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.246066    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Audit-Id: 8ae20b4e-84ea-4bb2-a477-343ab88bcd3c
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.246066    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.246066    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.246168    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.246168    9872 pod_ready.go:93] pod "kube-proxy-4ntql" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.246168    9872 pod_ready.go:82] duration metric: took 9.5734ms for pod "kube-proxy-4ntql" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 pod_ready.go:79] waiting up to 6m0s for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.246168    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.402939    9872 request.go:661] Waited for 156.77ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods/kube-scheduler-multinode-095200
	I0408 19:52:51.402939    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.403634    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.403634    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.407422    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.407495    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Audit-Id: 2f1155ac-bd1b-46ce-b9fe-f715c075b820
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.407495    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.407495    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.407848    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 09 0a 02  76 31 12 03 50 6f 64 12  |k8s.....v1..Pod.|
		00000010  f6 22 0a 81 18 0a 1f 6b  75 62 65 2d 73 63 68 65  |.".....kube-sche|
		00000020  64 75 6c 65 72 2d 6d 75  6c 74 69 6e 6f 64 65 2d  |duler-multinode-|
		00000030  30 39 35 32 30 30 12 00  1a 0b 6b 75 62 65 2d 73  |095200....kube-s|
		00000040  79 73 74 65 6d 22 00 2a  24 61 30 31 66 66 33 61  |ystem".*$a01ff3a|
		00000050  64 2d 66 30 37 61 2d 34  64 34 33 2d 61 36 66 31  |d-f07a-4d43-a6f1|
		00000060  2d 65 31 33 38 31 35 30  33 30 66 35 61 32 03 33  |-e13815030f5a2.3|
		00000070  36 39 38 00 42 08 08 f4  fc d5 bf 06 10 00 5a 1b  |698.B.........Z.|
		00000080  0a 09 63 6f 6d 70 6f 6e  65 6e 74 12 0e 6b 75 62  |..component..kub|
		00000090  65 2d 73 63 68 65 64 75  6c 65 72 5a 15 0a 04 74  |e-schedulerZ...t|
		000000a0  69 65 72 12 0d 63 6f 6e  74 72 6f 6c 2d 70 6c 61  |ier..control-pla|
		000000b0  6e 65 62 3d 0a 19 6b 75  62 65 72 6e 65 74 65 73  |neb=..kubernetes|
		000000c0  2e 69 6f 2f 63 6f 6e 66  69 67 2e 68 61 73 68 12  |.io/config.hash [truncated 21171 chars]
	 >
	I0408 19:52:51.408088    9872 type.go:168] "Request Body" body=""
	I0408 19:52:51.603014    9872 request.go:661] Waited for 194.9245ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes/multinode-095200
	I0408 19:52:51.603014    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.603014    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.603014    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.606573    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:51.606573    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Audit-Id: 7bca9e10-9992-4fa2-986c-2a216dfb30ab
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.606573    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.606573    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.607556    9872 type.go:168] "Response Body" body=<
		00000000  6b 38 73 00 0a 0a 0a 02  76 31 12 04 4e 6f 64 65  |k8s.....v1..Node|
		00000010  12 d7 22 0a 8a 11 0a 10  6d 75 6c 74 69 6e 6f 64  |..".....multinod|
		00000020  65 2d 30 39 35 32 30 30  12 00 1a 00 22 00 2a 24  |e-095200....".*$|
		00000030  66 30 63 62 35 36 34 36  2d 64 32 34 62 2d 34 64  |f0cb5646-d24b-4d|
		00000040  31 62 2d 38 38 34 61 2d  32 39 35 30 36 61 31 39  |1b-884a-29506a19|
		00000050  66 62 61 32 32 03 34 30  39 38 00 42 08 08 f0 fc  |fba22.4098.B....|
		00000060  d5 bf 06 10 00 5a 20 0a  17 62 65 74 61 2e 6b 75  |.....Z ..beta.ku|
		00000070  62 65 72 6e 65 74 65 73  2e 69 6f 2f 61 72 63 68  |bernetes.io/arch|
		00000080  12 05 61 6d 64 36 34 5a  1e 0a 15 62 65 74 61 2e  |..amd64Z...beta.|
		00000090  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 6f 73  |kubernetes.io/os|
		000000a0  12 05 6c 69 6e 75 78 5a  1b 0a 12 6b 75 62 65 72  |..linuxZ...kuber|
		000000b0  6e 65 74 65 73 2e 69 6f  2f 61 72 63 68 12 05 61  |netes.io/arch..a|
		000000c0  6d 64 36 34 5a 2a 0a 16  6b 75 62 65 72 6e 65 74  |md64Z*..kuberne [truncated 21015 chars]
	 >
	I0408 19:52:51.607556    9872 pod_ready.go:93] pod "kube-scheduler-multinode-095200" in "kube-system" namespace has status "Ready":"True"
	I0408 19:52:51.607556    9872 pod_ready.go:82] duration metric: took 361.3854ms for pod "kube-scheduler-multinode-095200" in "kube-system" namespace to be "Ready" ...
	I0408 19:52:51.607556    9872 pod_ready.go:39] duration metric: took 2.9180037s for extra waiting for all system-critical and pods with labels [k8s-app=kube-dns component=etcd component=kube-apiserver component=kube-controller-manager k8s-app=kube-proxy component=kube-scheduler] to be "Ready" ...
	I0408 19:52:51.607556    9872 api_server.go:52] waiting for apiserver process to appear ...
	I0408 19:52:51.619780    9872 ssh_runner.go:195] Run: sudo pgrep -xnf kube-apiserver.*minikube.*
	I0408 19:52:51.647663    9872 command_runner.go:130] > 2089
	I0408 19:52:51.647788    9872 api_server.go:72] duration metric: took 26.4545908s to wait for apiserver process to appear ...
	I0408 19:52:51.647788    9872 api_server.go:88] waiting for apiserver healthz status ...
	I0408 19:52:51.647788    9872 api_server.go:253] Checking apiserver healthz at https://172.22.37.202:8443/healthz ...
	I0408 19:52:51.658581    9872 api_server.go:279] https://172.22.37.202:8443/healthz returned 200:
	ok
	I0408 19:52:51.658761    9872 discovery_client.go:658] "Request Body" body=""
	I0408 19:52:51.658829    9872 round_trippers.go:470] GET https://172.22.37.202:8443/version
	I0408 19:52:51.658829    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.658888    9872 round_trippers.go:480]     Accept: application/json, */*
	I0408 19:52:51.658888    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.661055    9872 round_trippers.go:581] Response Status: 200 OK in 2 milliseconds
	I0408 19:52:51.661107    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.661107    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.661107    9872 round_trippers.go:587]     Content-Length: 263
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Audit-Id: b590b576-cb50-4b28-a8de-50b16cfa76ec
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.661161    9872 round_trippers.go:587]     Content-Type: application/json
	I0408 19:52:51.661161    9872 discovery_client.go:658] "Response Body" body=<
		{
		  "major": "1",
		  "minor": "32",
		  "gitVersion": "v1.32.2",
		  "gitCommit": "67a30c0adcf52bd3f56ff0893ce19966be12991f",
		  "gitTreeState": "clean",
		  "buildDate": "2025-02-12T21:19:47Z",
		  "goVersion": "go1.23.6",
		  "compiler": "gc",
		  "platform": "linux/amd64"
		}
	 >
	I0408 19:52:51.661361    9872 api_server.go:141] control plane version: v1.32.2
	I0408 19:52:51.661403    9872 api_server.go:131] duration metric: took 13.6144ms to wait for apiserver health ...
	I0408 19:52:51.661403    9872 system_pods.go:43] waiting for kube-system pods to appear ...
	I0408 19:52:51.661520    9872 type.go:204] "Request Body" body=""
	I0408 19:52:51.802419    9872 request.go:661] Waited for 140.8165ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:51.802419    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:51.802419    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:51.802419    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:51.809825    9872 round_trippers.go:581] Response Status: 200 OK in 7 milliseconds
	I0408 19:52:51.809825    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:51.809893    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:51 GMT
	I0408 19:52:51.809893    9872 round_trippers.go:587]     Audit-Id: bafb2d99-26b5-44bb-a309-31106bb703db
	I0408 19:52:51.811596    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:51.812162    9872 system_pods.go:59] 8 kube-system pods found
	I0408 19:52:51.812218    9872 system_pods.go:61] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:51.812218    9872 system_pods.go:61] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:51.812276    9872 system_pods.go:61] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:51.812276    9872 system_pods.go:74] duration metric: took 150.8726ms to wait for pod list to return data ...
	I0408 19:52:51.812348    9872 default_sa.go:34] waiting for default service account to be created ...
	I0408 19:52:51.812470    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.003061    9872 request.go:661] Waited for 190.5894ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/default/serviceaccounts
	I0408 19:52:52.003061    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.003061    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.003061    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.006914    9872 round_trippers.go:581] Response Status: 200 OK in 3 milliseconds
	I0408 19:52:52.006914    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Audit-Id: a637d434-a1be-4da0-a616-cf8ecf17d3e0
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.006914    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Content-Length: 128
	I0408 19:52:52.006914    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.006914    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 18 0a 02  76 31 12 12 53 65 72 76  |k8s.....v1..Serv|
		00000010  69 63 65 41 63 63 6f 75  6e 74 4c 69 73 74 12 5c  |iceAccountList.\|
		00000020  0a 09 0a 00 12 03 34 31  38 1a 00 12 4f 0a 4d 0a  |......418...O.M.|
		00000030  07 64 65 66 61 75 6c 74  12 00 1a 07 64 65 66 61  |.default....defa|
		00000040  75 6c 74 22 00 2a 24 35  33 64 30 36 30 66 38 2d  |ult".*$53d060f8-|
		00000050  31 66 37 34 2d 34 62 39  37 2d 38 61 38 39 2d 31  |1f74-4b97-8a89-1|
		00000060  35 39 62 36 63 33 62 65  66 63 39 32 03 33 31 32  |59b6c3befc92.312|
		00000070  38 00 42 08 08 f9 fc d5  bf 06 10 00 1a 00 22 00  |8.B...........".|
	 >
	I0408 19:52:52.006914    9872 default_sa.go:45] found service account: "default"
	I0408 19:52:52.006914    9872 default_sa.go:55] duration metric: took 194.5642ms for default service account to be created ...
	I0408 19:52:52.006914    9872 system_pods.go:116] waiting for k8s-apps to be running ...
	I0408 19:52:52.006914    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.203519    9872 request.go:661] Waited for 196.6036ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/namespaces/kube-system/pods
	I0408 19:52:52.203519    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.203519    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.203519    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.208304    9872 round_trippers.go:581] Response Status: 200 OK in 4 milliseconds
	I0408 19:52:52.208427    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Audit-Id: 84ce1cf3-db7a-4715-b298-40e0a0327e25
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.208427    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.208427    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.210076    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0d 0a 02  76 31 12 07 50 6f 64 4c  |k8s.....v1..PodL|
		00000010  69 73 74 12 fa c6 02 0a  09 0a 00 12 03 34 31 38  |ist..........418|
		00000020  1a 00 12 d0 27 0a ae 19  0a 18 63 6f 72 65 64 6e  |....'.....coredn|
		00000030  73 2d 36 36 38 64 36 62  66 39 62 63 2d 34 74 6e  |s-668d6bf9bc-4tn|
		00000040  36 38 12 13 63 6f 72 65  64 6e 73 2d 36 36 38 64  |68..coredns-668d|
		00000050  36 62 66 39 62 63 2d 1a  0b 6b 75 62 65 2d 73 79  |6bf9bc-..kube-sy|
		00000060  73 74 65 6d 22 00 2a 24  37 63 36 65 34 61 62 63  |stem".*$7c6e4abc|
		00000070  2d 35 65 34 31 2d 34 33  39 36 2d 38 38 38 36 2d  |-5e41-4396-8886-|
		00000080  30 37 35 62 61 39 33 36  65 30 38 65 32 03 34 31  |075ba936e08e2.41|
		00000090  33 38 00 42 08 08 f9 fc  d5 bf 06 10 00 5a 13 0a  |38.B.........Z..|
		000000a0  07 6b 38 73 2d 61 70 70  12 08 6b 75 62 65 2d 64  |.k8s-app..kube-d|
		000000b0  6e 73 5a 1f 0a 11 70 6f  64 2d 74 65 6d 70 6c 61  |nsZ...pod-templa|
		000000c0  74 65 2d 68 61 73 68 12  0a 36 36 38 64 36 62 66  |te-hash..668d6b [truncated 205787 chars]
	 >
	I0408 19:52:52.210419    9872 system_pods.go:86] 8 kube-system pods found
	I0408 19:52:52.210419    9872 system_pods.go:89] "coredns-668d6bf9bc-4tn68" [7c6e4abc-5e41-4396-8886-075ba936e08e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "etcd-multinode-095200" [c404275e-e2e9-4122-87d8-ff63ece130d2] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kindnet-bx7zx" [bf55feed-c31e-4730-86f1-0d04f66c8a9d] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-apiserver-multinode-095200" [98132851-4a76-4cd9-b6ab-e585bd32c3b5] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-controller-manager-multinode-095200" [d0830c89-0449-461e-9d22-c633f831473a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-proxy-4ntql" [aba79369-204d-4521-ab8d-badad40d688e] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "kube-scheduler-multinode-095200" [a01ff3ad-f07a-4d43-a6f1-e13815030f5a] Running
	I0408 19:52:52.210419    9872 system_pods.go:89] "storage-provisioner" [b68a8994-c73c-4400-bed6-5644b0542cde] Running
	I0408 19:52:52.210419    9872 system_pods.go:126] duration metric: took 203.5036ms to wait for k8s-apps to be running ...
	I0408 19:52:52.210419    9872 system_svc.go:44] waiting for kubelet service to be running ....
	I0408 19:52:52.222792    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service kubelet
	I0408 19:52:52.251103    9872 system_svc.go:56] duration metric: took 40.6834ms WaitForService to wait for kubelet
	I0408 19:52:52.251103    9872 kubeadm.go:582] duration metric: took 27.058026s to wait for: map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true]
	I0408 19:52:52.251103    9872 node_conditions.go:102] verifying NodePressure condition ...
	I0408 19:52:52.251103    9872 type.go:204] "Request Body" body=""
	I0408 19:52:52.403446    9872 request.go:661] Waited for 152.3415ms due to client-side throttling, not priority and fairness, request: GET:https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:470] GET https://172.22.37.202:8443/api/v1/nodes
	I0408 19:52:52.403446    9872 round_trippers.go:476] Request Headers:
	I0408 19:52:52.403446    9872 round_trippers.go:480]     Accept: application/vnd.kubernetes.protobuf,application/json
	I0408 19:52:52.403446    9872 round_trippers.go:480]     User-Agent: minikube-windows-amd64.exe/v0.0.0 (windows/amd64) kubernetes/$Format
	I0408 19:52:52.412138    9872 round_trippers.go:581] Response Status: 200 OK in 8 milliseconds
	I0408 19:52:52.412138    9872 round_trippers.go:584] Response Headers:
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Date: Tue, 08 Apr 2025 19:52:52 GMT
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Audit-Id: dc4626f5-ad91-4e2a-8710-3176b4249dc2
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Cache-Control: no-cache, private
	I0408 19:52:52.412138    9872 round_trippers.go:587]     Content-Type: application/vnd.kubernetes.protobuf
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Flowschema-Uid: 10152a92-e14d-454a-aed0-91137291a0fa
	I0408 19:52:52.412138    9872 round_trippers.go:587]     X-Kubernetes-Pf-Prioritylevel-Uid: 157dc250-2169-4573-b1d1-4365bad8c4c6
	I0408 19:52:52.412610    9872 type.go:204] "Response Body" body=<
		00000000  6b 38 73 00 0a 0e 0a 02  76 31 12 08 4e 6f 64 65  |k8s.....v1..Node|
		00000010  4c 69 73 74 12 e5 22 0a  09 0a 00 12 03 34 31 38  |List.."......418|
		00000020  1a 00 12 d7 22 0a 8a 11  0a 10 6d 75 6c 74 69 6e  |....".....multin|
		00000030  6f 64 65 2d 30 39 35 32  30 30 12 00 1a 00 22 00  |ode-095200....".|
		00000040  2a 24 66 30 63 62 35 36  34 36 2d 64 32 34 62 2d  |*$f0cb5646-d24b-|
		00000050  34 64 31 62 2d 38 38 34  61 2d 32 39 35 30 36 61  |4d1b-884a-29506a|
		00000060  31 39 66 62 61 32 32 03  34 30 39 38 00 42 08 08  |19fba22.4098.B..|
		00000070  f0 fc d5 bf 06 10 00 5a  20 0a 17 62 65 74 61 2e  |.......Z ..beta.|
		00000080  6b 75 62 65 72 6e 65 74  65 73 2e 69 6f 2f 61 72  |kubernetes.io/ar|
		00000090  63 68 12 05 61 6d 64 36  34 5a 1e 0a 15 62 65 74  |ch..amd64Z...bet|
		000000a0  61 2e 6b 75 62 65 72 6e  65 74 65 73 2e 69 6f 2f  |a.kubernetes.io/|
		000000b0  6f 73 12 05 6c 69 6e 75  78 5a 1b 0a 12 6b 75 62  |os..linuxZ...kub|
		000000c0  65 72 6e 65 74 65 73 2e  69 6f 2f 61 72 63 68 12  |ernetes.io/arch [truncated 21096 chars]
	 >
	I0408 19:52:52.412847    9872 node_conditions.go:122] node storage ephemeral capacity is 17734596Ki
	I0408 19:52:52.412948    9872 node_conditions.go:123] node cpu capacity is 2
	I0408 19:52:52.412948    9872 node_conditions.go:105] duration metric: took 161.8435ms to run NodePressure ...
	I0408 19:52:52.412948    9872 start.go:241] waiting for startup goroutines ...
	I0408 19:52:52.413034    9872 start.go:246] waiting for cluster config update ...
	I0408 19:52:52.413034    9872 start.go:255] writing updated cluster config ...
	I0408 19:52:52.417337    9872 out.go:201] 
	I0408 19:52:52.420643    9872 config.go:182] Loaded profile config "ha-089400": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436361    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:52:52.436489    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.442533    9872 out.go:177] * Starting "multinode-095200-m02" worker node in "multinode-095200" cluster
	I0408 19:52:52.445620    9872 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 19:52:52.445620    9872 cache.go:56] Caching tarball of preloaded images
	I0408 19:52:52.446731    9872 preload.go:172] Found C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 in cache, skipping download
	I0408 19:52:52.446731    9872 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 19:52:52.447575    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:52:52.455791    9872 start.go:360] acquireMachinesLock for multinode-095200-m02: {Name:mk88ace50ad3bf72786f3a589a5328076247f3a1 Clock:{} Delay:500ms Timeout:13m0s Cancel:<nil>}
	I0408 19:52:52.455791    9872 start.go:364] duration metric: took 0s to acquireMachinesLock for "multinode-095200-m02"
	I0408 19:52:52.455791    9872 start.go:93] Provisioning new machine with config: &{Name:multinode-095200 KeepContext:false EmbedCerts:false MinikubeISO:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:2200 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 Clus
terName:multinode-095200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local ContainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP:172.22.37.202 Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true} {Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}] Addons:map[default-storageclass:true storage-provisioner:true] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true apps_running:true default_sa:true extra:true kubelet:true node_ready:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:true ExtraDisks:0 CertExpiration:26280h0m0s
Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s} &{Name:m02 IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:false Worker:true}
	I0408 19:52:52.455791    9872 start.go:125] createHost starting for "m02" (driver="hyperv")
	I0408 19:52:52.459917    9872 out.go:235] * Creating hyperv VM (CPUs=2, Memory=2200MB, Disk=20000MB) ...
	I0408 19:52:52.460924    9872 start.go:159] libmachine.API.Create for "multinode-095200" (driver="hyperv")
	I0408 19:52:52.460924    9872 client.go:168] LocalClient.Create starting
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem
	I0408 19:52:52.461202    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.461786    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462084    9872 main.go:141] libmachine: Reading certificate data from C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Decoding PEM data...
	I0408 19:52:52.462409    9872 main.go:141] libmachine: Parsing certificate...
	I0408 19:52:52.462566    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @(Get-Module -ListAvailable hyper-v).Name | Get-Unique
	I0408 19:52:54.525061    9872 main.go:141] libmachine: [stdout =====>] : Hyper-V
	
	I0408 19:52:54.526010    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:54.526133    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole(([System.Security.Principal.SecurityIdentifier]::new("S-1-5-32-578")))
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stdout =====>] : False
	
	I0408 19:52:56.398389    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:56.398452    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:52:57.984271    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:52:57.984450    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:52:57.984520    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:01.897463    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:01.898553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:01.900585    9872 main.go:141] libmachine: Downloading C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\boot2docker.iso from file://C:/Users/jenkins.minikube3/minikube-integration/.minikube/cache/iso/amd64/minikube-v1.35.0-amd64.iso...
	I0408 19:53:02.523557    9872 main.go:141] libmachine: Creating SSH key...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: Creating VM...
	I0408 19:53:02.612867    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive [Console]::OutputEncoding = [Text.Encoding]::UTF8; ConvertTo-Json @(Hyper-V\Get-VMSwitch|Select Id, Name, SwitchType|Where-Object {($_.SwitchType -eq 'External') -or ($_.Id -eq 'c08cb7b8-9b3c-408e-8e30-5e16a3aeb444')}|Sort-Object -Property SwitchType)
	I0408 19:53:05.732036    9872 main.go:141] libmachine: [stdout =====>] : [
	    {
	        "Id":  "c08cb7b8-9b3c-408e-8e30-5e16a3aeb444",
	        "Name":  "Default Switch",
	        "SwitchType":  1
	    }
	]
	
	I0408 19:53:05.732397    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:05.732490    9872 main.go:141] libmachine: Using switch "Default Switch"
	I0408 19:53:05.732490    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive @([Security.Principal.WindowsPrincipal][Security.Principal.WindowsIdentity]::GetCurrent()).IsInRole([Security.Principal.WindowsBuiltInRole] "Administrator")
	I0408 19:53:07.594878    9872 main.go:141] libmachine: [stdout =====>] : True
	
	I0408 19:53:07.595889    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:07.596096    9872 main.go:141] libmachine: Creating VHD
	I0408 19:53:07.596096    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -SizeBytes 10MB -Fixed
	I0408 19:53:11.577042    9872 main.go:141] libmachine: [stdout =====>] : 
	
	ComputerName            : minikube3
	Path                    : C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed
	                          .vhd
	VhdFormat               : VHD
	VhdType                 : Fixed
	FileSize                : 10486272
	Size                    : 10485760
	MinimumSize             : 
	LogicalSectorSize       : 512
	PhysicalSectorSize      : 512
	BlockSize               : 0
	ParentPath              : 
	DiskIdentifier          : 2FD5440F-954A-47F9-B4A8-17FE393653DF
	FragmentationPercentage : 0
	Alignment               : 1
	Attached                : False
	DiskNumber              : 
	IsPMEMCompatible        : False
	AddressAbstractionType  : None
	Number                  : 
	
	
	
	
	I0408 19:53:11.577502    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing magic tar header
	I0408 19:53:11.577502    9872 main.go:141] libmachine: Writing SSH key tar header
	I0408 19:53:11.591964    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Convert-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\fixed.vhd' -DestinationPath 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -VHDType Dynamic -DeleteSource
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:14.963199    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:14.964267    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Resize-VHD -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd' -SizeBytes 20000MB
	I0408 19:53:17.689976    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:17.690380    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:17.690465    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\New-VM multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02' -SwitchName 'Default Switch' -MemoryStartupBytes 2200MB
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stdout =====>] : 
	Name                 State CPUUsage(%) MemoryAssigned(M) Uptime   Status             Version
	----                 ----- ----------- ----------------- ------   ------             -------
	multinode-095200-m02 Off   0           0                 00:00:00 Operating normally 9.0    
	
	
	
	I0408 19:53:21.625100    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:21.625833    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMMemory -VMName multinode-095200-m02 -DynamicMemoryEnabled $false
	I0408 19:53:24.085069    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:24.085908    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMProcessor multinode-095200-m02 -Count 2
	I0408 19:53:26.428616    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:26.428803    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Set-VMDvdDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\boot2docker.iso'
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:29.162617    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:29.163472    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Add-VMHardDiskDrive -VMName multinode-095200-m02 -Path 'C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\disk.vhd'
	I0408 19:53:31.982190    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:31.982452    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:31.982543    9872 main.go:141] libmachine: Starting VM...
	I0408 19:53:31.982543    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive Hyper-V\Start-VM multinode-095200-m02
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:35.272489    9872 main.go:141] libmachine: Waiting for host to start...
	I0408 19:53:35.272489    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:37.767993    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:37.768715    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:40.460161    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:40.460920    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:41.461007    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:43.851799    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:43.852207    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:43.852295    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:46.536555    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:47.537627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:49.899335    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:52.601424    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:53.601814    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:55.905078    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stdout =====>] : 
	I0408 19:53:58.586979    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:53:59.588060    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:01.962254    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:01.962534    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:04.645041    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:04.645414    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:06.903160    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:06.903963    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:06.904065    9872 machine.go:93] provisionDockerMachine start ...
	I0408 19:54:06.904185    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:09.216553    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:11.884844    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:11.891802    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:11.907452    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:11.907452    9872 main.go:141] libmachine: About to run SSH command:
	hostname
	I0408 19:54:12.041104    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: minikube
	
	I0408 19:54:12.041104    9872 buildroot.go:166] provisioning hostname "multinode-095200-m02"
	I0408 19:54:12.041239    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:14.346493    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:14.347500    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:14.347643    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:17.077188    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:17.082859    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:17.083594    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:17.083666    9872 main.go:141] libmachine: About to run SSH command:
	sudo hostname multinode-095200-m02 && echo "multinode-095200-m02" | sudo tee /etc/hostname
	I0408 19:54:17.245305    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: multinode-095200-m02
	
	I0408 19:54:17.245305    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:19.491991    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:19.492729    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:22.179670    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:22.180640    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:22.186476    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:22.187018    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:22.187018    9872 main.go:141] libmachine: About to run SSH command:
	
			if ! grep -xq '.*\smultinode-095200-m02' /etc/hosts; then
				if grep -xq '127.0.1.1\s.*' /etc/hosts; then
					sudo sed -i 's/^127.0.1.1\s.*/127.0.1.1 multinode-095200-m02/g' /etc/hosts;
				else 
					echo '127.0.1.1 multinode-095200-m02' | sudo tee -a /etc/hosts; 
				fi
			fi
	I0408 19:54:22.337404    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 
	I0408 19:54:22.337404    9872 buildroot.go:172] set auth options {CertDir:C:\Users\jenkins.minikube3\minikube-integration\.minikube CaCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem CaPrivateKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem CaCertRemotePath:/etc/docker/ca.pem ServerCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ServerKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem ClientKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem ServerCertRemotePath:/etc/docker/server.pem ServerKeyRemotePath:/etc/docker/server-key.pem ClientCertPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem ServerCertSANs:[] StorePath:C:\Users\jenkins.minikube3\minikube-integration\.minikube}
	I0408 19:54:22.337404    9872 buildroot.go:174] setting up certificates
	I0408 19:54:22.337404    9872 provision.go:84] configureAuth start
	I0408 19:54:22.337404    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:24.612186    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:24.612401    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:27.290471    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:29.513085    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:29.513212    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:32.153529    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:32.154654    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:32.154654    9872 provision.go:143] copyHostCerts
	I0408 19:54:32.154720    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem
	I0408 19:54:32.154720    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem, removing ...
	I0408 19:54:32.154720    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\ca.pem
	I0408 19:54:32.155773    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/ca.pem (1078 bytes)
	I0408 19:54:32.156776    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem
	I0408 19:54:32.157424    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem, removing ...
	I0408 19:54:32.157424    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cert.pem
	I0408 19:54:32.158603    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\cert.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/cert.pem (1123 bytes)
	I0408 19:54:32.159435    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem -> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem
	I0408 19:54:32.160116    9872 exec_runner.go:144] found C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem, removing ...
	I0408 19:54:32.160226    9872 exec_runner.go:203] rm: C:\Users\jenkins.minikube3\minikube-integration\.minikube\key.pem
	I0408 19:54:32.160407    9872 exec_runner.go:151] cp: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\key.pem --> C:\Users\jenkins.minikube3\minikube-integration\.minikube/key.pem (1675 bytes)
	I0408 19:54:32.161212    9872 provision.go:117] generating server cert: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem ca-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem private-key=C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca-key.pem org=jenkins.multinode-095200-m02 san=[127.0.0.1 172.22.33.56 localhost minikube multinode-095200-m02]
	I0408 19:54:32.324126    9872 provision.go:177] copyRemoteCerts
	I0408 19:54:32.334660    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/docker /etc/docker /etc/docker
	I0408 19:54:32.334660    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:34.598875    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:34.599340    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:37.272651    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:37.273390    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:54:37.374824    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/docker /etc/docker /etc/docker: (5.0401249s)
	I0408 19:54:37.374945    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem -> /etc/docker/ca.pem
	I0408 19:54:37.375508    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\certs\ca.pem --> /etc/docker/ca.pem (1078 bytes)
	I0408 19:54:37.423226    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem -> /etc/docker/server.pem
	I0408 19:54:37.423679    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server.pem --> /etc/docker/server.pem (1229 bytes)
	I0408 19:54:37.471478    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem -> /etc/docker/server-key.pem
	I0408 19:54:37.472012    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\server-key.pem --> /etc/docker/server-key.pem (1679 bytes)
	I0408 19:54:37.518770    9872 provision.go:87] duration metric: took 15.1812494s to configureAuth
	I0408 19:54:37.518770    9872 buildroot.go:189] setting minikube options for container-runtime
	I0408 19:54:37.519941    9872 config.go:182] Loaded profile config "multinode-095200": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 19:54:37.520064    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:39.799700    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:39.799959    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:42.497439    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:42.498072    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:42.503538    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:42.504262    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:42.504361    9872 main.go:141] libmachine: About to run SSH command:
	df --output=fstype / | tail -n 1
	I0408 19:54:42.637892    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: tmpfs
	
	I0408 19:54:42.637892    9872 buildroot.go:70] root file system type: tmpfs
	I0408 19:54:42.638421    9872 provision.go:314] Updating docker unit: /lib/systemd/system/docker.service ...
	I0408 19:54:42.638546    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:44.883057    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:44.883630    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:47.555664    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:47.561606    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:47.561606    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:47.562257    9872 main.go:141] libmachine: About to run SSH command:
	sudo mkdir -p /lib/systemd/system && printf %s "[Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment="NO_PROXY=172.22.37.202"
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP \$MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	" | sudo tee /lib/systemd/system/docker.service.new
	I0408 19:54:47.727227    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: [Unit]
	Description=Docker Application Container Engine
	Documentation=https://docs.docker.com
	After=network.target  minikube-automount.service docker.socket
	Requires= minikube-automount.service docker.socket 
	StartLimitBurst=3
	StartLimitIntervalSec=60
	
	[Service]
	Type=notify
	Restart=on-failure
	
	Environment=NO_PROXY=172.22.37.202
	
	
	# This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	# The base configuration already specifies an 'ExecStart=...' command. The first directive
	# here is to clear out that command inherited from the base configuration. Without this,
	# the command from the base configuration and the command specified here are treated as
	# a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	# will catch this invalid input and refuse to start the service with an error like:
	#  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	
	# NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	# container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	ExecStart=
	ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	ExecReload=/bin/kill -s HUP $MAINPID
	
	# Having non-zero Limit*s causes performance problems due to accounting overhead
	# in the kernel. We recommend using cgroups to do container-local accounting.
	LimitNOFILE=infinity
	LimitNPROC=infinity
	LimitCORE=infinity
	
	# Uncomment TasksMax if your systemd version supports it.
	# Only systemd 226 and above support this version.
	TasksMax=infinity
	TimeoutStartSec=0
	
	# set delegate yes so that systemd does not reset the cgroups of docker containers
	Delegate=yes
	
	# kill only the docker process, not all processes in the cgroup
	KillMode=process
	
	[Install]
	WantedBy=multi-user.target
	
	I0408 19:54:47.727227    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:50.007737    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:54:52.747088    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:54:52.748078    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:52.754162    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:54:52.754417    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:54:52.754417    9872 main.go:141] libmachine: About to run SSH command:
	sudo diff -u /lib/systemd/system/docker.service /lib/systemd/system/docker.service.new || { sudo mv /lib/systemd/system/docker.service.new /lib/systemd/system/docker.service; sudo systemctl -f daemon-reload && sudo systemctl -f enable docker && sudo systemctl -f restart docker; }
	I0408 19:54:55.088890    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: diff: can't stat '/lib/systemd/system/docker.service': No such file or directory
	Created symlink /etc/systemd/system/multi-user.target.wants/docker.service → /usr/lib/systemd/system/docker.service.
	
	I0408 19:54:55.089464    9872 machine.go:96] duration metric: took 48.1850287s to provisionDockerMachine
	I0408 19:54:55.089464    9872 client.go:171] duration metric: took 2m2.6275969s to LocalClient.Create
	I0408 19:54:55.089464    9872 start.go:167] duration metric: took 2m2.6275969s to libmachine.API.Create "multinode-095200"
	I0408 19:54:55.089464    9872 start.go:293] postStartSetup for "multinode-095200-m02" (driver="hyperv")
	I0408 19:54:55.089464    9872 start.go:322] creating required directories: [/etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs]
	I0408 19:54:55.101732    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs
	I0408 19:54:55.101732    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:54:57.329342    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:54:57.330236    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:00.079336    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:00.079870    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:00.080350    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:00.195063    9872 ssh_runner.go:235] Completed: sudo mkdir -p /etc/kubernetes/addons /etc/kubernetes/manifests /var/tmp/minikube /var/lib/minikube /var/lib/minikube/certs /var/lib/minikube/images /var/lib/minikube/binaries /tmp/gvisor /usr/share/ca-certificates /etc/ssl/certs: (5.0931882s)
	I0408 19:55:00.207511    9872 ssh_runner.go:195] Run: cat /etc/os-release
	I0408 19:55:00.215515    9872 command_runner.go:130] > NAME=Buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION=2023.02.9-dirty
	I0408 19:55:00.215515    9872 command_runner.go:130] > ID=buildroot
	I0408 19:55:00.215515    9872 command_runner.go:130] > VERSION_ID=2023.02.9
	I0408 19:55:00.215515    9872 command_runner.go:130] > PRETTY_NAME="Buildroot 2023.02.9"
	I0408 19:55:00.215515    9872 info.go:137] Remote host: Buildroot 2023.02.9
	I0408 19:55:00.215515    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\addons for local assets ...
	I0408 19:55:00.216532    9872 filesync.go:126] Scanning C:\Users\jenkins.minikube3\minikube-integration\.minikube\files for local assets ...
	I0408 19:55:00.217511    9872 filesync.go:149] local asset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> 79042.pem in /etc/ssl/certs
	I0408 19:55:00.217511    9872 vm_assets.go:164] NewFileAsset: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem -> /etc/ssl/certs/79042.pem
	I0408 19:55:00.232510    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/ssl/certs
	I0408 19:55:00.251138    9872 ssh_runner.go:362] scp C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\ssl\certs\79042.pem --> /etc/ssl/certs/79042.pem (1708 bytes)
	I0408 19:55:00.301370    9872 start.go:296] duration metric: took 5.2118669s for postStartSetup
	I0408 19:55:00.303865    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:02.586887    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:02.587759    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:05.325238    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:05.325915    9872 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\multinode-095200\config.json ...
	I0408 19:55:05.328095    9872 start.go:128] duration metric: took 2m12.8712825s to createHost
	I0408 19:55:05.328628    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:07.594167    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:07.594508    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:10.339593    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:10.350563    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:10.351730    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:10.351730    9872 main.go:141] libmachine: About to run SSH command:
	date +%s.%N
	I0408 19:55:10.491524    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: 1744142110.496323222
	
	I0408 19:55:10.491656    9872 fix.go:216] guest clock: 1744142110.496323222
	I0408 19:55:10.491656    9872 fix.go:229] Guest: 2025-04-08 19:55:10.496323222 +0000 UTC Remote: 2025-04-08 19:55:05.3286282 +0000 UTC m=+364.651784601 (delta=5.167695022s)
	I0408 19:55:10.491656    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:12.849272    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:12.849821    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:15.651831    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:15.652257    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:15.658941    9872 main.go:141] libmachine: Using SSH client type: native
	I0408 19:55:15.659584    9872 main.go:141] libmachine: &{{{<nil> 0 [] [] []} docker [0x13c7d00] 0x13ca840 <nil>  [] 0s} 172.22.33.56 22 <nil> <nil>}
	I0408 19:55:15.659584    9872 main.go:141] libmachine: About to run SSH command:
	sudo date -s @1744142110
	I0408 19:55:15.813362    9872 main.go:141] libmachine: SSH cmd err, output: <nil>: Tue Apr  8 19:55:10 UTC 2025
	
	I0408 19:55:15.813362    9872 fix.go:236] clock set: Tue Apr  8 19:55:10 UTC 2025
	 (err=<nil>)
	I0408 19:55:15.813362    9872 start.go:83] releasing machines lock for "multinode-095200-m02", held for 2m23.35647s
	I0408 19:55:15.813627    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:18.101228    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:20.894229    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:20.898140    9872 out.go:177] * Found network options:
	I0408 19:55:20.901029    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.903474    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.905881    9872 out.go:177]   - NO_PROXY=172.22.37.202
	W0408 19:55:20.908951    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	W0408 19:55:20.910245    9872 proxy.go:119] fail to check proxy env: Error ip not in block
	I0408 19:55:20.912681    9872 ssh_runner.go:195] Run: curl.exe -sS -m 2 https://registry.k8s.io/
	I0408 19:55:20.912681    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:20.923970    9872 ssh_runner.go:195] Run: sh -c "stat /etc/cni/net.d/*loopback.conf*"
	I0408 19:55:20.923970    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive ( Hyper-V\Get-VM multinode-095200-m02 ).state
	I0408 19:55:23.304513    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.305087    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stdout =====>] : Running
	
	I0408 19:55:23.331738    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:23.331843    9872 main.go:141] libmachine: [executing ==>] : C:\WINDOWS\System32\WindowsPowerShell\v1.0\powershell.exe -NoProfile -NonInteractive (( Hyper-V\Get-VM multinode-095200-m02 ).networkadapters[0]).ipaddresses[0]
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.176007    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.176396    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stdout =====>] : 172.22.33.56
	
	I0408 19:55:26.188642    9872 main.go:141] libmachine: [stderr =====>] : 
	I0408 19:55:26.188642    9872 sshutil.go:53] new ssh client: &{IP:172.22.33.56 Port:22 SSHKeyPath:C:\Users\jenkins.minikube3\minikube-integration\.minikube\machines\multinode-095200-m02\id_rsa Username:docker}
	I0408 19:55:26.277947    9872 command_runner.go:130] ! stat: cannot statx '/etc/cni/net.d/*loopback.conf*': No such file or directory
	I0408 19:55:26.278419    9872 ssh_runner.go:235] Completed: sh -c "stat /etc/cni/net.d/*loopback.conf*": (5.3543374s)
	W0408 19:55:26.278466    9872 cni.go:209] loopback cni configuration skipped: "/etc/cni/net.d/*loopback.conf*" not found
	I0408 19:55:26.289094    9872 command_runner.go:130] ! bash: line 1: curl.exe: command not found
	I0408 19:55:26.289094    9872 ssh_runner.go:235] Completed: curl.exe -sS -m 2 https://registry.k8s.io/: (5.3763716s)
	W0408 19:55:26.289094    9872 start.go:867] [curl.exe -sS -m 2 https://registry.k8s.io/] failed: curl.exe -sS -m 2 https://registry.k8s.io/: Process exited with status 127
	stdout:
	
	stderr:
	bash: line 1: curl.exe: command not found
	I0408 19:55:26.292222    9872 ssh_runner.go:195] Run: sudo find /etc/cni/net.d -maxdepth 1 -type f ( ( -name *bridge* -or -name *podman* ) -and -not -name *.mk_disabled ) -printf "%p, " -exec sh -c "sudo mv {} {}.mk_disabled" ;
	I0408 19:55:26.327385    9872 command_runner.go:139] > /etc/cni/net.d/87-podman-bridge.conflist, 
	I0408 19:55:26.327478    9872 cni.go:262] disabled [/etc/cni/net.d/87-podman-bridge.conflist] bridge cni config(s)
	I0408 19:55:26.327538    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:26.327924    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///run/containerd/containerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:26.369419    9872 command_runner.go:130] > runtime-endpoint: unix:///run/containerd/containerd.sock
	I0408 19:55:26.379095    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)sandbox_image = .*$|\1sandbox_image = "registry.k8s.io/pause:3.10"|' /etc/containerd/config.toml"
	W0408 19:55:26.388182    9872 out.go:270] ! Failing to connect to https://registry.k8s.io/ from inside the minikube VM
	W0408 19:55:26.388274    9872 out.go:270] * To pull new external images, you may need to configure a proxy: https://minikube.sigs.k8s.io/docs/reference/networking/proxy/
	I0408 19:55:26.417855    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)restrict_oom_score_adj = .*$|\1restrict_oom_score_adj = false|' /etc/containerd/config.toml"
	I0408 19:55:26.437529    9872 containerd.go:146] configuring containerd to use "cgroupfs" as cgroup driver...
	I0408 19:55:26.448526    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)SystemdCgroup = .*$|\1SystemdCgroup = false|g' /etc/containerd/config.toml"
	I0408 19:55:26.481804    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runtime.v1.linux"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.514798    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/systemd_cgroup/d' /etc/containerd/config.toml"
	I0408 19:55:26.546001    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i 's|"io.containerd.runc.v1"|"io.containerd.runc.v2"|g' /etc/containerd/config.toml"
	I0408 19:55:26.578303    9872 ssh_runner.go:195] Run: sh -c "sudo rm -rf /etc/cni/net.mk"
	I0408 19:55:26.610707    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)conf_dir = .*$|\1conf_dir = "/etc/cni/net.d"|g' /etc/containerd/config.toml"
	I0408 19:55:26.642654    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i '/^ *enable_unprivileged_ports = .*/d' /etc/containerd/config.toml"
	I0408 19:55:26.674410    9872 ssh_runner.go:195] Run: sh -c "sudo sed -i -r 's|^( *)\[plugins."io.containerd.grpc.v1.cri"\]|&\n\1  enable_unprivileged_ports = true|' /etc/containerd/config.toml"
	I0408 19:55:26.710783    9872 ssh_runner.go:195] Run: sudo sysctl net.bridge.bridge-nf-call-iptables
	I0408 19:55:26.727858    9872 command_runner.go:130] ! sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.728943    9872 crio.go:166] couldn't verify netfilter by "sudo sysctl net.bridge.bridge-nf-call-iptables" which might be okay. error: sudo sysctl net.bridge.bridge-nf-call-iptables: Process exited with status 255
	stdout:
	
	stderr:
	sysctl: cannot stat /proc/sys/net/bridge/bridge-nf-call-iptables: No such file or directory
	I0408 19:55:26.740966    9872 ssh_runner.go:195] Run: sudo modprobe br_netfilter
	I0408 19:55:26.776381    9872 ssh_runner.go:195] Run: sudo sh -c "echo 1 > /proc/sys/net/ipv4/ip_forward"
	I0408 19:55:26.807253    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:27.019654    9872 ssh_runner.go:195] Run: sudo systemctl restart containerd
	I0408 19:55:27.056064    9872 start.go:495] detecting cgroup driver to use...
	I0408 19:55:27.070461    9872 ssh_runner.go:195] Run: sudo systemctl cat docker.service
	I0408 19:55:27.100723    9872 command_runner.go:130] > # /usr/lib/systemd/system/docker.service
	I0408 19:55:27.100821    9872 command_runner.go:130] > [Unit]
	I0408 19:55:27.100821    9872 command_runner.go:130] > Description=Docker Application Container Engine
	I0408 19:55:27.100821    9872 command_runner.go:130] > Documentation=https://docs.docker.com
	I0408 19:55:27.100821    9872 command_runner.go:130] > After=network.target  minikube-automount.service docker.socket
	I0408 19:55:27.100821    9872 command_runner.go:130] > Requires= minikube-automount.service docker.socket 
	I0408 19:55:27.100901    9872 command_runner.go:130] > StartLimitBurst=3
	I0408 19:55:27.100931    9872 command_runner.go:130] > StartLimitIntervalSec=60
	I0408 19:55:27.100931    9872 command_runner.go:130] > [Service]
	I0408 19:55:27.100931    9872 command_runner.go:130] > Type=notify
	I0408 19:55:27.100931    9872 command_runner.go:130] > Restart=on-failure
	I0408 19:55:27.100931    9872 command_runner.go:130] > Environment=NO_PROXY=172.22.37.202
	I0408 19:55:27.100931    9872 command_runner.go:130] > # This file is a systemd drop-in unit that inherits from the base dockerd configuration.
	I0408 19:55:27.100993    9872 command_runner.go:130] > # The base configuration already specifies an 'ExecStart=...' command. The first directive
	I0408 19:55:27.100993    9872 command_runner.go:130] > # here is to clear out that command inherited from the base configuration. Without this,
	I0408 19:55:27.101049    9872 command_runner.go:130] > # the command from the base configuration and the command specified here are treated as
	I0408 19:55:27.101070    9872 command_runner.go:130] > # a sequence of commands, which is not the desired behavior, nor is it valid -- systemd
	I0408 19:55:27.101070    9872 command_runner.go:130] > # will catch this invalid input and refuse to start the service with an error like:
	I0408 19:55:27.101070    9872 command_runner.go:130] > #  Service has more than one ExecStart= setting, which is only allowed for Type=oneshot services.
	I0408 19:55:27.101127    9872 command_runner.go:130] > # NOTE: default-ulimit=nofile is set to an arbitrary number for consistency with other
	I0408 19:55:27.101158    9872 command_runner.go:130] > # container runtimes. If left unlimited, it may result in OOM issues with MySQL.
	I0408 19:55:27.101158    9872 command_runner.go:130] > ExecStart=
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecStart=/usr/bin/dockerd -H tcp://0.0.0.0:2376 -H unix:///var/run/docker.sock --default-ulimit=nofile=1048576:1048576 --tlsverify --tlscacert /etc/docker/ca.pem --tlscert /etc/docker/server.pem --tlskey /etc/docker/server-key.pem --label provider=hyperv --insecure-registry 10.96.0.0/12 
	I0408 19:55:27.101207    9872 command_runner.go:130] > ExecReload=/bin/kill -s HUP $MAINPID
	I0408 19:55:27.101207    9872 command_runner.go:130] > # Having non-zero Limit*s causes performance problems due to accounting overhead
	I0408 19:55:27.101207    9872 command_runner.go:130] > # in the kernel. We recommend using cgroups to do container-local accounting.
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNOFILE=infinity
	I0408 19:55:27.101272    9872 command_runner.go:130] > LimitNPROC=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > LimitCORE=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Uncomment TasksMax if your systemd version supports it.
	I0408 19:55:27.101291    9872 command_runner.go:130] > # Only systemd 226 and above support this version.
	I0408 19:55:27.101291    9872 command_runner.go:130] > TasksMax=infinity
	I0408 19:55:27.101291    9872 command_runner.go:130] > TimeoutStartSec=0
	I0408 19:55:27.101351    9872 command_runner.go:130] > # set delegate yes so that systemd does not reset the cgroups of docker containers
	I0408 19:55:27.101371    9872 command_runner.go:130] > Delegate=yes
	I0408 19:55:27.101371    9872 command_runner.go:130] > # kill only the docker process, not all processes in the cgroup
	I0408 19:55:27.101371    9872 command_runner.go:130] > KillMode=process
	I0408 19:55:27.101371    9872 command_runner.go:130] > [Install]
	I0408 19:55:27.101371    9872 command_runner.go:130] > WantedBy=multi-user.target
	I0408 19:55:27.116125    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.158592    9872 ssh_runner.go:195] Run: sudo systemctl stop -f containerd
	I0408 19:55:27.214106    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service containerd
	I0408 19:55:27.256823    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.296867    9872 ssh_runner.go:195] Run: sudo systemctl stop -f crio
	I0408 19:55:27.362992    9872 ssh_runner.go:195] Run: sudo systemctl is-active --quiet service crio
	I0408 19:55:27.390233    9872 ssh_runner.go:195] Run: /bin/bash -c "sudo mkdir -p /etc && printf %s "runtime-endpoint: unix:///var/run/cri-dockerd.sock
	" | sudo tee /etc/crictl.yaml"
	I0408 19:55:27.424669    9872 command_runner.go:130] > runtime-endpoint: unix:///var/run/cri-dockerd.sock
	I0408 19:55:27.435283    9872 ssh_runner.go:195] Run: which cri-dockerd
	I0408 19:55:27.443469    9872 command_runner.go:130] > /usr/bin/cri-dockerd
	I0408 19:55:27.456498    9872 ssh_runner.go:195] Run: sudo mkdir -p /etc/systemd/system/cri-docker.service.d
	I0408 19:55:27.475244    9872 ssh_runner.go:362] scp memory --> /etc/systemd/system/cri-docker.service.d/10-cni.conf (190 bytes)
	I0408 19:55:27.531230    9872 ssh_runner.go:195] Run: sudo systemctl unmask docker.service
	I0408 19:55:27.766251    9872 ssh_runner.go:195] Run: sudo systemctl enable docker.socket
	I0408 19:55:27.969410    9872 docker.go:574] configuring docker to use "cgroupfs" as cgroup driver...
	I0408 19:55:27.969593    9872 ssh_runner.go:362] scp memory --> /etc/docker/daemon.json (130 bytes)
	I0408 19:55:28.023527    9872 ssh_runner.go:195] Run: sudo systemctl daemon-reload
	I0408 19:55:28.256767    9872 ssh_runner.go:195] Run: sudo systemctl restart docker
	I0408 19:56:29.375667    9872 command_runner.go:130] ! Job for docker.service failed because the control process exited with error code.
	I0408 19:56:29.375736    9872 command_runner.go:130] ! See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	I0408 19:56:29.375968    9872 ssh_runner.go:235] Completed: sudo systemctl restart docker: (1m1.1187313s)
	I0408 19:56:29.388548    9872 ssh_runner.go:195] Run: sudo journalctl --no-pager -u docker
	I0408 19:56:29.414684    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.414789    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	I0408 19:56:29.414849    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	I0408 19:56:29.414917    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	I0408 19:56:29.414997    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	I0408 19:56:29.415023    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415617    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415821    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	I0408 19:56:29.415902    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.415977    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416062    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416191    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416402    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416449    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416470    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416532    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	I0408 19:56:29.416583    9872 command_runner.go:130] > Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	I0408 19:56:29.417169    9872 command_runner.go:130] > Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	I0408 19:56:29.425924    9872 out.go:201] 
	W0408 19:56:29.428252    9872 out.go:270] X Exiting due to RUNTIME_ENABLE: Failed to enable container runtime: sudo systemctl restart docker: Process exited with status 1
	stdout:
	
	stderr:
	Job for docker.service failed because the control process exited with error code.
	See "systemctl status docker.service" and "journalctl -xeu docker.service" for details.
	
	sudo journalctl --no-pager -u docker:
	-- stdout --
	Apr 08 19:54:53 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.389676225Z" level=info msg="Starting up"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.390935445Z" level=info msg="containerd not running, starting managed containerd"
	Apr 08 19:54:53 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:53.391985163Z" level=info msg="started new containerd process" address=/var/run/docker/containerd/containerd.sock module=libcontainerd pid=666
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.428028753Z" level=info msg="starting containerd" revision=57f17b0a6295a39009d861b89e3b3b87b005ca27 version=v1.7.23
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459697772Z" level=info msg="loading plugin \"io.containerd.event.v1.exchange\"..." type=io.containerd.event.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459808074Z" level=info msg="loading plugin \"io.containerd.internal.v1.opt\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.459900976Z" level=info msg="loading plugin \"io.containerd.warning.v1.deprecations\"..." type=io.containerd.warning.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460034578Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460130779Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.blockfile\"..." error="no scratch file generator: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460318582Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460533886Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.btrfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.btrfs (ext4) must be a btrfs filesystem to be used with the btrfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460632988Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460655088Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.devmapper\"..." error="devmapper not configured: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460666288Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.native\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.460761690Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.overlayfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.461110395Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.aufs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464548052Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.aufs\"..." error="aufs is not supported (modprobe aufs failed: exit status 1 \"modprobe: FATAL: Module aufs not found in directory /lib/modules/5.10.207\\n\"): skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464657453Z" level=info msg="loading plugin \"io.containerd.snapshotter.v1.zfs\"..." type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464808056Z" level=info msg="skip loading plugin \"io.containerd.snapshotter.v1.zfs\"..." error="path /var/lib/docker/containerd/daemon/io.containerd.snapshotter.v1.zfs must be a zfs filesystem to be used with the zfs snapshotter: skip plugin" type=io.containerd.snapshotter.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.464895257Z" level=info msg="loading plugin \"io.containerd.content.v1.content\"..." type=io.containerd.content.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465004859Z" level=info msg="loading plugin \"io.containerd.metadata.v1.bolt\"..." type=io.containerd.metadata.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.465184462Z" level=info msg="metadata content store policy set" policy=shared
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489699764Z" level=info msg="loading plugin \"io.containerd.gc.v1.scheduler\"..." type=io.containerd.gc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489813666Z" level=info msg="loading plugin \"io.containerd.differ.v1.walking\"..." type=io.containerd.differ.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489849666Z" level=info msg="loading plugin \"io.containerd.lease.v1.manager\"..." type=io.containerd.lease.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489869767Z" level=info msg="loading plugin \"io.containerd.streaming.v1.manager\"..." type=io.containerd.streaming.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.489885767Z" level=info msg="loading plugin \"io.containerd.runtime.v1.linux\"..." type=io.containerd.runtime.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490007469Z" level=info msg="loading plugin \"io.containerd.monitor.v1.cgroups\"..." type=io.containerd.monitor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.490811882Z" level=info msg="loading plugin \"io.containerd.runtime.v2.task\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491014585Z" level=info msg="loading plugin \"io.containerd.runtime.v2.shim\"..." type=io.containerd.runtime.v2
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491078986Z" level=info msg="loading plugin \"io.containerd.sandbox.store.v1.local\"..." type=io.containerd.sandbox.store.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491105387Z" level=info msg="loading plugin \"io.containerd.sandbox.controller.v1.local\"..." type=io.containerd.sandbox.controller.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491131087Z" level=info msg="loading plugin \"io.containerd.service.v1.containers-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491147488Z" level=info msg="loading plugin \"io.containerd.service.v1.content-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491344591Z" level=info msg="loading plugin \"io.containerd.service.v1.diff-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491392592Z" level=info msg="loading plugin \"io.containerd.service.v1.images-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491413092Z" level=info msg="loading plugin \"io.containerd.service.v1.introspection-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491428092Z" level=info msg="loading plugin \"io.containerd.service.v1.namespaces-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491450593Z" level=info msg="loading plugin \"io.containerd.service.v1.snapshots-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491466093Z" level=info msg="loading plugin \"io.containerd.service.v1.tasks-service\"..." type=io.containerd.service.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491489593Z" level=info msg="loading plugin \"io.containerd.grpc.v1.containers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491558594Z" level=info msg="loading plugin \"io.containerd.grpc.v1.content\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491725497Z" level=info msg="loading plugin \"io.containerd.grpc.v1.diff\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491747697Z" level=info msg="loading plugin \"io.containerd.grpc.v1.events\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491765998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.images\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491781598Z" level=info msg="loading plugin \"io.containerd.grpc.v1.introspection\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491794998Z" level=info msg="loading plugin \"io.containerd.grpc.v1.leases\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491808798Z" level=info msg="loading plugin \"io.containerd.grpc.v1.namespaces\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491830599Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandbox-controllers\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491866899Z" level=info msg="loading plugin \"io.containerd.grpc.v1.sandboxes\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491885500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.snapshots\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491911500Z" level=info msg="loading plugin \"io.containerd.grpc.v1.streaming\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491928300Z" level=info msg="loading plugin \"io.containerd.grpc.v1.tasks\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.491973001Z" level=info msg="loading plugin \"io.containerd.transfer.v1.local\"..." type=io.containerd.transfer.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492030102Z" level=info msg="loading plugin \"io.containerd.grpc.v1.transfer\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492044902Z" level=info msg="loading plugin \"io.containerd.grpc.v1.version\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492057202Z" level=info msg="loading plugin \"io.containerd.internal.v1.restart\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492116803Z" level=info msg="loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492139604Z" level=info msg="skip loading plugin \"io.containerd.tracing.processor.v1.otlp\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.tracing.processor.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492390908Z" level=info msg="loading plugin \"io.containerd.internal.v1.tracing\"..." type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492518610Z" level=info msg="skip loading plugin \"io.containerd.internal.v1.tracing\"..." error="skip plugin: tracing endpoint not configured" type=io.containerd.internal.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492539110Z" level=info msg="loading plugin \"io.containerd.grpc.v1.healthcheck\"..." type=io.containerd.grpc.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492570811Z" level=info msg="loading plugin \"io.containerd.nri.v1.nri\"..." type=io.containerd.nri.v1
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.492586111Z" level=info msg="NRI interface is disabled by configuration."
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493081719Z" level=info msg=serving... address=/var/run/docker/containerd/containerd-debug.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493253822Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock.ttrpc
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493374324Z" level=info msg=serving... address=/var/run/docker/containerd/containerd.sock
	Apr 08 19:54:53 multinode-095200-m02 dockerd[666]: time="2025-04-08T19:54:53.493424225Z" level=info msg="containerd successfully booted in 0.066631s"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.461191499Z" level=info msg="[graphdriver] trying configured driver: overlay2"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.498466272Z" level=info msg="Loading containers: start."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.657747819Z" level=warning msg="ip6tables is enabled, but cannot set up ip6tables chains" error="failed to create NAT chain DOCKER: iptables failed: ip6tables --wait -t nat -N DOCKER: ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)\nPerhaps ip6tables or your kernel needs to be upgraded.\n (exit status 3)"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.924858342Z" level=info msg="Loading containers: done."
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.956989928Z" level=warning msg="WARNING: bridge-nf-call-iptables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957084329Z" level=warning msg="WARNING: bridge-nf-call-ip6tables is disabled"
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957114230Z" level=info msg="Docker daemon" commit=92a8393 containerd-snapshotter=false storage-driver=overlay2 version=27.4.0
	Apr 08 19:54:54 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:54.957267533Z" level=info msg="Daemon has completed initialization"
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092612079Z" level=info msg="API listen on /var/run/docker.sock"
	Apr 08 19:54:55 multinode-095200-m02 systemd[1]: Started Docker Application Container Engine.
	Apr 08 19:54:55 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:54:55.092720980Z" level=info msg="API listen on [::]:2376"
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.288664965Z" level=info msg="Processing signal 'terminated'"
	Apr 08 19:55:28 multinode-095200-m02 systemd[1]: Stopping Docker Application Container Engine...
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291019172Z" level=info msg="stopping event stream following graceful shutdown" error="<nil>" module=libcontainerd namespace=moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291836574Z" level=info msg="stopping healthcheck following graceful shutdown" module=libcontainerd
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291962574Z" level=info msg="stopping event stream following graceful shutdown" error="context canceled" module=libcontainerd namespace=plugins.moby
	Apr 08 19:55:28 multinode-095200-m02 dockerd[660]: time="2025-04-08T19:55:28.291853074Z" level=info msg="Daemon shutdown complete"
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: docker.service: Deactivated successfully.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Stopped Docker Application Container Engine.
	Apr 08 19:55:29 multinode-095200-m02 systemd[1]: Starting Docker Application Container Engine...
	Apr 08 19:55:29 multinode-095200-m02 dockerd[1080]: time="2025-04-08T19:55:29.349740203Z" level=info msg="Starting up"
	Apr 08 19:56:29 multinode-095200-m02 dockerd[1080]: failed to start daemon: failed to dial "/run/containerd/containerd.sock": failed to dial "/run/containerd/containerd.sock": context deadline exceeded
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Main process exited, code=exited, status=1/FAILURE
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: docker.service: Failed with result 'exit-code'.
	Apr 08 19:56:29 multinode-095200-m02 systemd[1]: Failed to start Docker Application Container Engine.
	
	-- /stdout --
	W0408 19:56:29.428252    9872 out.go:270] * 
	W0408 19:56:29.430051    9872 out.go:293] ╭─────────────────────────────────────────────────────────────────────────────────────────────╮
	│                                                                                             │
	│    * If the above advice does not help, please let us know:                                 │
	│      https://github.com/kubernetes/minikube/issues/new/choose                               │
	│                                                                                             │
	│    * Please run `minikube logs --file=logs.txt` and attach logs.txt to the GitHub issue.    │
	│                                                                                             │
	╰─────────────────────────────────────────────────────────────────────────────────────────────╯
	I0408 19:56:29.434095    9872 out.go:201] 
	
	
	==> Docker <==
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.138252536Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.174996776Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175255875Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175279075Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.175386275Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/fdb671cbb60205888706b9993e25121c92d634f486683a067f061e304de591bd/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:52:49Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/499b93d5842769d0510935c1b0ff7790dae5f4ee21472a2342c54a3fa1da8773/resolv.conf as [nameserver 172.22.32.1]"
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520484656Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520889157Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.520916057Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.521088958Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677504887Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677666087Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.677839088Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:52:49 multinode-095200 dockerd[1449]: time="2025-04-08T19:52:49.678169888Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310620301Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310792803Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.310817803Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:11.311021905Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:11 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:11Z" level=info msg="Will attempt to re-write config file /var/lib/docker/containers/24feeb6878af8f2e3d8a31bd0e2b179192ce02e8e20d04d4d523246b72a8ddfe/resolv.conf as [nameserver 10.96.0.10 search default.svc.cluster.local svc.cluster.local cluster.local options ndots:5]"
	Apr 08 19:57:13 multinode-095200 cri-dockerd[1340]: time="2025-04-08T19:57:13Z" level=info msg="Stop pulling image gcr.io/k8s-minikube/busybox:1.28: Status: Downloaded newer image for gcr.io/k8s-minikube/busybox:1.28"
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268796369Z" level=info msg="loading plugin \"io.containerd.event.v1.publisher\"..." runtime=io.containerd.runc.v2 type=io.containerd.event.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268860569Z" level=info msg="loading plugin \"io.containerd.internal.v1.shutdown\"..." runtime=io.containerd.runc.v2 type=io.containerd.internal.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.268873669Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.task\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	Apr 08 19:57:13 multinode-095200 dockerd[1449]: time="2025-04-08T19:57:13.269006570Z" level=info msg="loading plugin \"io.containerd.ttrpc.v1.pause\"..." runtime=io.containerd.runc.v2 type=io.containerd.ttrpc.v1
	
	
	==> container status <==
	CONTAINER           IMAGE                                                                                                 CREATED             STATE               NAME                      ATTEMPT             POD ID              POD
	81a5e3779adab       gcr.io/k8s-minikube/busybox@sha256:9afb80db71730dbb303fe00765cbf34bddbdc6b66e49897fc2e1861967584b12   21 minutes ago      Running             busybox                   0                   24feeb6878af8       busybox-58667487b6-jn4np
	0d50f7fd39ccf       c69fa2e9cbf5f                                                                                         26 minutes ago      Running             coredns                   0                   499b93d584276       coredns-668d6bf9bc-4tn68
	9927dca24957d       6e38f40d628db                                                                                         26 minutes ago      Running             storage-provisioner       0                   fdb671cbb6020       storage-provisioner
	d50b649f09480       kindest/kindnetd@sha256:e3c42406b0806c1f7e8a66838377936cbd2cdfd94d9b26a3eefedada8713d495              26 minutes ago      Running             kindnet-cni               0                   b2bab718ffeb8       kindnet-bx7zx
	f5f5cd010e555       f1332858868e1                                                                                         26 minutes ago      Running             kube-proxy                0                   d09051e006b07       kube-proxy-4ntql
	27fb5b70d546a       b6a454c5a800d                                                                                         26 minutes ago      Running             kube-controller-manager   0                   15cf27964e69e       kube-controller-manager-multinode-095200
	4a78eb26dd061       a9e7e6b294baf                                                                                         26 minutes ago      Running             etcd                      0                   0dd3e6bb0bc65       etcd-multinode-095200
	f825a4ac0fd12       d8e673e7c9983                                                                                         26 minutes ago      Running             kube-scheduler            0                   22e4a13c64396       kube-scheduler-multinode-095200
	2738225576c81       85b7a174738ba                                                                                         26 minutes ago      Running             kube-apiserver            0                   2e19f8c3ebf76       kube-apiserver-multinode-095200
	
	
	==> coredns [0d50f7fd39cc] <==
	.:53
	[INFO] plugin/reload: Running configuration SHA512 = 7891e5b704ef032f7013002eacbd917d45a45cb64b1d54398d75b5d9196050f49212bfcf7e87658d5f551671bba7f7546fce0966e4b6bf00350ad31999975a26
	CoreDNS-1.11.3
	linux/amd64, go1.21.11, a6338e9
	[INFO] 127.0.0.1:49640 - 3852 "HINFO IN 2634235346578504389.6293141254353391107. udp 57 false 512" NXDOMAIN qr,rd,ra 132 0.042117937s
	[INFO] 10.244.0.3:48144 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000356702s
	[INFO] 10.244.0.3:34835 - 3 "AAAA IN kubernetes.io. udp 31 false 512" NOERROR qr,rd,ra 140 0.13279721s
	[INFO] 10.244.0.3:33803 - 4 "A IN kubernetes.io. udp 31 false 512" NOERROR qr,rd 60 0.044468738s
	[INFO] 10.244.0.3:36247 - 5 "PTR IN 148.40.75.147.in-addr.arpa. udp 44 false 512" NXDOMAIN qr,rd,ra 140 1.087022914s
	[INFO] 10.244.0.3:44861 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000233501s
	[INFO] 10.244.0.3:35209 - 3 "AAAA IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.051064673s
	[INFO] 10.244.0.3:55261 - 4 "AAAA IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000271302s
	[INFO] 10.244.0.3:50891 - 5 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000226301s
	[INFO] 10.244.0.3:47466 - 6 "A IN kubernetes.default. udp 36 false 512" NXDOMAIN qr,rd,ra 111 0.029589859s
	[INFO] 10.244.0.3:56852 - 7 "A IN kubernetes.default.default.svc.cluster.local. udp 62 false 512" NXDOMAIN qr,aa,rd 155 0.000128201s
	[INFO] 10.244.0.3:53365 - 8 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000172101s
	[INFO] 10.244.0.3:58492 - 9 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000296702s
	[INFO] 10.244.0.3:41257 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000186601s
	[INFO] 10.244.0.3:33967 - 3 "AAAA IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 147 0.000235602s
	[INFO] 10.244.0.3:34652 - 4 "A IN kubernetes.default.svc.cluster.local. udp 54 false 512" NOERROR qr,aa,rd 106 0.000120301s
	[INFO] 10.244.0.3:42747 - 5 "PTR IN 1.0.96.10.in-addr.arpa. udp 40 false 512" NOERROR qr,aa,rd 112 0.000392603s
	[INFO] 10.244.0.3:34722 - 2 "PTR IN 10.0.96.10.in-addr.arpa. udp 41 false 512" NOERROR qr,aa,rd 116 0.000319201s
	[INFO] 10.244.0.3:39354 - 3 "AAAA IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 40 0.000278601s
	[INFO] 10.244.0.3:58350 - 4 "A IN host.minikube.internal. udp 40 false 512" NOERROR qr,aa,rd 78 0.0001081s
	[INFO] 10.244.0.3:46798 - 5 "PTR IN 1.32.22.172.in-addr.arpa. udp 42 false 512" NOERROR qr,aa,rd 102 0.000209101s
	
	
	==> describe nodes <==
	Name:               multinode-095200
	Roles:              control-plane
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=true
	                    minikube.k8s.io/updated_at=2025_04_08T19_52_21_0700
	                    minikube.k8s.io/version=v1.35.0
	                    node-role.kubernetes.io/control-plane=
	                    node.kubernetes.io/exclude-from-external-load-balancers=
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 19:52:16 +0000
	Taints:             <none>
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:18:43 +0000
	Conditions:
	  Type             Status  LastHeartbeatTime                 LastTransitionTime                Reason                       Message
	  ----             ------  -----------------                 ------------------                ------                       -------
	  MemoryPressure   False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientMemory   kubelet has sufficient memory available
	  DiskPressure     False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasNoDiskPressure     kubelet has no disk pressure
	  PIDPressure      False   Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:13 +0000   KubeletHasSufficientPID      kubelet has sufficient PID available
	  Ready            True    Tue, 08 Apr 2025 20:16:28 +0000   Tue, 08 Apr 2025 19:52:48 +0000   KubeletReady                 kubelet is posting ready status
	Addresses:
	  InternalIP:  172.22.37.202
	  Hostname:    multinode-095200
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 d1671845d9204bdab9b7bf235be4f077
	  System UUID:                3d89ed90-a3aa-f74e-af73-ea9b9f4d2209
	  Boot ID:                    2ee0e4fc-1948-4c6d-9549-90ef0dc3efbf
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.0.0/24
	PodCIDRs:                     10.244.0.0/24
	Non-terminated Pods:          (9 in total)
	  Namespace                   Name                                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-jn4np                    0 (0%)        0 (0%)      0 (0%)           0 (0%)         21m
	  kube-system                 coredns-668d6bf9bc-4tn68                    100m (5%)     0 (0%)      70Mi (3%)        170Mi (8%)     26m
	  kube-system                 etcd-multinode-095200                       100m (5%)     0 (0%)      100Mi (4%)       0 (0%)         26m
	  kube-system                 kindnet-bx7zx                               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      26m
	  kube-system                 kube-apiserver-multinode-095200             250m (12%)    0 (0%)      0 (0%)           0 (0%)         26m
	  kube-system                 kube-controller-manager-multinode-095200    200m (10%)    0 (0%)      0 (0%)           0 (0%)         26m
	  kube-system                 kube-proxy-4ntql                            0 (0%)        0 (0%)      0 (0%)           0 (0%)         26m
	  kube-system                 kube-scheduler-multinode-095200             100m (5%)     0 (0%)      0 (0%)           0 (0%)         26m
	  kube-system                 storage-provisioner                         0 (0%)        0 (0%)      0 (0%)           0 (0%)         26m
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests     Limits
	  --------           --------     ------
	  cpu                850m (42%)   100m (5%)
	  memory             220Mi (10%)  220Mi (10%)
	  ephemeral-storage  0 (0%)       0 (0%)
	  hugepages-2Mi      0 (0%)       0 (0%)
	Events:
	  Type    Reason                   Age   From             Message
	  ----    ------                   ----  ----             -------
	  Normal  Starting                 26m   kube-proxy       
	  Normal  Starting                 26m   kubelet          Starting kubelet.
	  Normal  NodeAllocatableEnforced  26m   kubelet          Updated Node Allocatable limit across pods
	  Normal  NodeHasSufficientMemory  26m   kubelet          Node multinode-095200 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    26m   kubelet          Node multinode-095200 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     26m   kubelet          Node multinode-095200 status is now: NodeHasSufficientPID
	  Normal  RegisteredNode           26m   node-controller  Node multinode-095200 event: Registered Node multinode-095200 in Controller
	  Normal  NodeReady                26m   kubelet          Node multinode-095200 status is now: NodeReady
	
	
	Name:               multinode-095200-m03
	Roles:              <none>
	Labels:             beta.kubernetes.io/arch=amd64
	                    beta.kubernetes.io/os=linux
	                    kubernetes.io/arch=amd64
	                    kubernetes.io/hostname=multinode-095200-m03
	                    kubernetes.io/os=linux
	                    minikube.k8s.io/commit=00fec7ad00298ce3ccd71a2d57a7f829f082fec8
	                    minikube.k8s.io/name=multinode-095200
	                    minikube.k8s.io/primary=false
	                    minikube.k8s.io/updated_at=2025_04_08T20_13_27_0700
	                    minikube.k8s.io/version=v1.35.0
	Annotations:        kubeadm.alpha.kubernetes.io/cri-socket: unix:///var/run/cri-dockerd.sock
	                    node.alpha.kubernetes.io/ttl: 0
	                    volumes.kubernetes.io/controller-managed-attach-detach: true
	CreationTimestamp:  Tue, 08 Apr 2025 20:13:26 +0000
	Taints:             node.kubernetes.io/unreachable:NoExecute
	                    node.kubernetes.io/unreachable:NoSchedule
	Unschedulable:      false
	Lease:
	  HolderIdentity:  multinode-095200-m03
	  AcquireTime:     <unset>
	  RenewTime:       Tue, 08 Apr 2025 20:17:12 +0000
	Conditions:
	  Type             Status    LastHeartbeatTime                 LastTransitionTime                Reason              Message
	  ----             ------    -----------------                 ------------------                ------              -------
	  MemoryPressure   Unknown   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:18:04 +0000   NodeStatusUnknown   Kubelet stopped posting node status.
	  DiskPressure     Unknown   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:18:04 +0000   NodeStatusUnknown   Kubelet stopped posting node status.
	  PIDPressure      Unknown   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:18:04 +0000   NodeStatusUnknown   Kubelet stopped posting node status.
	  Ready            Unknown   Tue, 08 Apr 2025 20:14:28 +0000   Tue, 08 Apr 2025 20:18:04 +0000   NodeStatusUnknown   Kubelet stopped posting node status.
	Addresses:
	  InternalIP:  172.22.39.251
	  Hostname:    multinode-095200-m03
	Capacity:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	Allocatable:
	  cpu:                2
	  ephemeral-storage:  17734596Ki
	  hugepages-2Mi:      0
	  memory:             2164264Ki
	  pods:               110
	System Info:
	  Machine ID:                 5dd08273ae3d422da88197050793897d
	  System UUID:                eb5f1aa3-1dba-eb43-ad9d-e1ed36b898a1
	  Boot ID:                    d4cb601e-8968-4e50-b6e7-defe0d03ea49
	  Kernel Version:             5.10.207
	  OS Image:                   Buildroot 2023.02.9
	  Operating System:           linux
	  Architecture:               amd64
	  Container Runtime Version:  docker://27.4.0
	  Kubelet Version:            v1.32.2
	  Kube-Proxy Version:         v1.32.2
	PodCIDR:                      10.244.1.0/24
	PodCIDRs:                     10.244.1.0/24
	Non-terminated Pods:          (3 in total)
	  Namespace                   Name                        CPU Requests  CPU Limits  Memory Requests  Memory Limits  Age
	  ---------                   ----                        ------------  ----------  ---------------  -------------  ---
	  default                     busybox-58667487b6-js7bg    0 (0%)        0 (0%)      0 (0%)           0 (0%)         21m
	  kube-system                 kindnet-4gvks               100m (5%)     100m (5%)   50Mi (2%)        50Mi (2%)      5m26s
	  kube-system                 kube-proxy-fcgz7            0 (0%)        0 (0%)      0 (0%)           0 (0%)         5m26s
	Allocated resources:
	  (Total limits may be over 100 percent, i.e., overcommitted.)
	  Resource           Requests   Limits
	  --------           --------   ------
	  cpu                100m (5%)  100m (5%)
	  memory             50Mi (2%)  50Mi (2%)
	  ephemeral-storage  0 (0%)     0 (0%)
	  hugepages-2Mi      0 (0%)     0 (0%)
	Events:
	  Type    Reason                   Age                    From             Message
	  ----    ------                   ----                   ----             -------
	  Normal  Starting                 5m12s                  kube-proxy       
	  Normal  NodeHasSufficientMemory  5m26s (x2 over 5m26s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientMemory
	  Normal  NodeHasNoDiskPressure    5m26s (x2 over 5m26s)  kubelet          Node multinode-095200-m03 status is now: NodeHasNoDiskPressure
	  Normal  NodeHasSufficientPID     5m26s (x2 over 5m26s)  kubelet          Node multinode-095200-m03 status is now: NodeHasSufficientPID
	  Normal  NodeAllocatableEnforced  5m26s                  kubelet          Updated Node Allocatable limit across pods
	  Normal  RegisteredNode           5m23s                  node-controller  Node multinode-095200-m03 event: Registered Node multinode-095200-m03 in Controller
	  Normal  NodeReady                4m53s                  kubelet          Node multinode-095200-m03 status is now: NodeReady
	  Normal  NodeNotReady             48s                    node-controller  Node multinode-095200-m03 status is now: NodeNotReady
	
	
	==> dmesg <==
	[  +7.092780] NFSD: Using /var/lib/nfs/v4recovery as the NFSv4 state recovery directory
	[  +0.000007] NFSD: unable to find recovery directory /var/lib/nfs/v4recovery
	[  +0.000002] NFSD: Unable to initialize client recovery tracking! (-2)
	[Apr 8 19:51] systemd-fstab-generator[639]: Ignoring "noauto" option for root device
	[  +0.186506] systemd-fstab-generator[651]: Ignoring "noauto" option for root device
	[ +33.736558] systemd-fstab-generator[1005]: Ignoring "noauto" option for root device
	[  +0.119307] kauditd_printk_skb: 69 callbacks suppressed
	[  +0.538847] systemd-fstab-generator[1043]: Ignoring "noauto" option for root device
	[  +0.201737] systemd-fstab-generator[1055]: Ignoring "noauto" option for root device
	[  +0.250503] systemd-fstab-generator[1070]: Ignoring "noauto" option for root device
	[  +2.931666] systemd-fstab-generator[1293]: Ignoring "noauto" option for root device
	[  +0.234030] systemd-fstab-generator[1305]: Ignoring "noauto" option for root device
	[  +0.212776] systemd-fstab-generator[1317]: Ignoring "noauto" option for root device
	[  +0.289443] systemd-fstab-generator[1332]: Ignoring "noauto" option for root device
	[Apr 8 19:52] systemd-fstab-generator[1435]: Ignoring "noauto" option for root device
	[  +0.123685] kauditd_printk_skb: 206 callbacks suppressed
	[  +3.585223] systemd-fstab-generator[1693]: Ignoring "noauto" option for root device
	[  +6.405761] systemd-fstab-generator[1839]: Ignoring "noauto" option for root device
	[  +0.098848] kauditd_printk_skb: 74 callbacks suppressed
	[  +9.056345] systemd-fstab-generator[2264]: Ignoring "noauto" option for root device
	[  +0.141529] kauditd_printk_skb: 62 callbacks suppressed
	[  +5.712567] systemd-fstab-generator[2366]: Ignoring "noauto" option for root device
	[  +0.231070] kauditd_printk_skb: 12 callbacks suppressed
	[ +10.420333] kauditd_printk_skb: 51 callbacks suppressed
	[Apr 8 19:57] kauditd_printk_skb: 14 callbacks suppressed
	
	
	==> etcd [4a78eb26dd06] <==
	{"level":"info","ts":"2025-04-08T20:02:14.012096Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":650,"took":"16.853983ms","hash":4142537579,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":2072576,"current-db-size-in-use":"2.1 MB"}
	{"level":"info","ts":"2025-04-08T20:02:14.012321Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":4142537579,"revision":650,"compact-revision":-1}
	{"level":"info","ts":"2025-04-08T20:07:14.017211Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":892}
	{"level":"info","ts":"2025-04-08T20:07:14.030836Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":892,"took":"13.37027ms","hash":3377129079,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1581056,"current-db-size-in-use":"1.6 MB"}
	{"level":"info","ts":"2025-04-08T20:07:14.030906Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3377129079,"revision":892,"compact-revision":650}
	{"level":"info","ts":"2025-04-08T20:10:48.783716Z","caller":"traceutil/trace.go:171","msg":"trace[1418995672] transaction","detail":"{read_only:false; response_revision:1304; number_of_response:1; }","duration":"172.026431ms","start":"2025-04-08T20:10:48.611671Z","end":"2025-04-08T20:10:48.783698Z","steps":["trace[1418995672] 'process raft request'  (duration: 171.88573ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:10:51.233244Z","caller":"traceutil/trace.go:171","msg":"trace[1434363476] transaction","detail":"{read_only:false; response_revision:1307; number_of_response:1; }","duration":"154.120134ms","start":"2025-04-08T20:10:51.079104Z","end":"2025-04-08T20:10:51.233224Z","steps":["trace[1434363476] 'process raft request'  (duration: 153.912433ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:11:50.092603Z","caller":"traceutil/trace.go:171","msg":"trace[1794927297] transaction","detail":"{read_only:false; response_revision:1355; number_of_response:1; }","duration":"207.047428ms","start":"2025-04-08T20:11:49.885538Z","end":"2025-04-08T20:11:50.092585Z","steps":["trace[1794927297] 'process raft request'  (duration: 206.952227ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:12:14.034956Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1132}
	{"level":"info","ts":"2025-04-08T20:12:14.043791Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":1132,"took":"8.264645ms","hash":3480195930,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1519616,"current-db-size-in-use":"1.5 MB"}
	{"level":"info","ts":"2025-04-08T20:12:14.043914Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3480195930,"revision":1132,"compact-revision":892}
	{"level":"info","ts":"2025-04-08T20:13:19.538892Z","caller":"traceutil/trace.go:171","msg":"trace[539230204] transaction","detail":"{read_only:false; response_revision:1427; number_of_response:1; }","duration":"196.84768ms","start":"2025-04-08T20:13:19.342005Z","end":"2025-04-08T20:13:19.538853Z","steps":["trace[539230204] 'process raft request'  (duration: 140.495971ms)","trace[539230204] 'compare'  (duration: 56.215708ms)"],"step_count":2}
	{"level":"info","ts":"2025-04-08T20:13:37.385572Z","caller":"traceutil/trace.go:171","msg":"trace[614883105] transaction","detail":"{read_only:false; response_revision:1480; number_of_response:1; }","duration":"116.69824ms","start":"2025-04-08T20:13:37.268859Z","end":"2025-04-08T20:13:37.385557Z","steps":["trace[614883105] 'process raft request'  (duration: 116.60574ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:13:37.497911Z","caller":"traceutil/trace.go:171","msg":"trace[1757030073] transaction","detail":"{read_only:false; response_revision:1481; number_of_response:1; }","duration":"112.257716ms","start":"2025-04-08T20:13:37.385615Z","end":"2025-04-08T20:13:37.497873Z","steps":["trace[1757030073] 'process raft request'  (duration: 44.166642ms)","trace[1757030073] 'compare'  (duration: 67.877973ms)"],"step_count":2}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"283.043654ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/jobs/\" range_end:\"/registry/jobs0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132713Z","caller":"traceutil/trace.go:171","msg":"trace[488800171] range","detail":"{range_begin:/registry/jobs/; range_end:/registry/jobs0; response_count:0; response_revision:1492; }","duration":"283.247756ms","start":"2025-04-08T20:13:42.849451Z","end":"2025-04-08T20:13:43.132699Z","steps":["trace[488800171] 'count revisions from in-memory index tree'  (duration: 282.917554ms)"],"step_count":1}
	{"level":"warn","ts":"2025-04-08T20:13:43.132605Z","caller":"etcdserver/util.go:170","msg":"apply request took too long","took":"126.454095ms","expected-duration":"100ms","prefix":"read-only range ","request":"key:\"/registry/persistentvolumeclaims/\" range_end:\"/registry/persistentvolumeclaims0\" count_only:true ","response":"range_response_count:0 size:5"}
	{"level":"info","ts":"2025-04-08T20:13:43.132897Z","caller":"traceutil/trace.go:171","msg":"trace[1026523730] range","detail":"{range_begin:/registry/persistentvolumeclaims/; range_end:/registry/persistentvolumeclaims0; response_count:0; response_revision:1492; }","duration":"126.801497ms","start":"2025-04-08T20:13:43.006085Z","end":"2025-04-08T20:13:43.132887Z","steps":["trace[1026523730] 'count revisions from in-memory index tree'  (duration: 126.320694ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:17:14.054454Z","caller":"mvcc/index.go:214","msg":"compact tree index","revision":1374}
	{"level":"info","ts":"2025-04-08T20:17:14.066920Z","caller":"mvcc/kvstore_compaction.go:72","msg":"finished scheduled compaction","compact-revision":1374,"took":"11.919966ms","hash":3639547689,"current-db-size-bytes":2072576,"current-db-size":"2.1 MB","current-db-size-in-use-bytes":1732608,"current-db-size-in-use":"1.7 MB"}
	{"level":"info","ts":"2025-04-08T20:17:14.067032Z","caller":"mvcc/hash.go:151","msg":"storing new hash","hash":3639547689,"revision":1374,"compact-revision":1132}
	{"level":"info","ts":"2025-04-08T20:17:34.810002Z","caller":"traceutil/trace.go:171","msg":"trace[329511236] transaction","detail":"{read_only:false; response_revision:1720; number_of_response:1; }","duration":"124.372791ms","start":"2025-04-08T20:17:34.685609Z","end":"2025-04-08T20:17:34.809982Z","steps":["trace[329511236] 'process raft request'  (duration: 124.250791ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:17:37.029199Z","caller":"traceutil/trace.go:171","msg":"trace[227903822] transaction","detail":"{read_only:false; response_revision:1721; number_of_response:1; }","duration":"208.643161ms","start":"2025-04-08T20:17:36.820538Z","end":"2025-04-08T20:17:37.029181Z","steps":["trace[227903822] 'process raft request'  (duration: 208.49126ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:17:37.041362Z","caller":"traceutil/trace.go:171","msg":"trace[1133989445] transaction","detail":"{read_only:false; response_revision:1722; number_of_response:1; }","duration":"147.029818ms","start":"2025-04-08T20:17:36.894316Z","end":"2025-04-08T20:17:37.041346Z","steps":["trace[1133989445] 'process raft request'  (duration: 146.902717ms)"],"step_count":1}
	{"level":"info","ts":"2025-04-08T20:17:39.174638Z","caller":"traceutil/trace.go:171","msg":"trace[1814722059] transaction","detail":"{read_only:false; response_revision:1723; number_of_response:1; }","duration":"136.036157ms","start":"2025-04-08T20:17:39.038582Z","end":"2025-04-08T20:17:39.174618Z","steps":["trace[1814722059] 'process raft request'  (duration: 135.872656ms)"],"step_count":1}
	
	
	==> kernel <==
	 20:18:52 up 28 min,  0 users,  load average: 0.00, 0.30, 0.47
	Linux multinode-095200 5.10.207 #1 SMP Tue Jan 14 08:15:54 UTC 2025 x86_64 GNU/Linux
	PRETTY_NAME="Buildroot 2023.02.9"
	
	
	==> kindnet [d50b649f0948] <==
	I0408 20:17:47.235149       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:17:57.237218       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:17:57.237332       1 main.go:301] handling current node
	I0408 20:17:57.237355       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:17:57.237382       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:18:07.228553       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:18:07.229018       1 main.go:301] handling current node
	I0408 20:18:07.229126       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:18:07.229383       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:18:17.233458       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:18:17.233610       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:18:17.234099       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:18:17.234246       1 main.go:301] handling current node
	I0408 20:18:27.235390       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:18:27.235540       1 main.go:301] handling current node
	I0408 20:18:27.235579       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:18:27.235601       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:18:37.227765       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:18:37.227864       1 main.go:301] handling current node
	I0408 20:18:37.227885       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:18:37.227892       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	I0408 20:18:47.229758       1 main.go:297] Handling node with IPs: map[172.22.37.202:{}]
	I0408 20:18:47.229797       1 main.go:301] handling current node
	I0408 20:18:47.229814       1 main.go:297] Handling node with IPs: map[172.22.39.251:{}]
	I0408 20:18:47.229820       1 main.go:324] Node multinode-095200-m03 has CIDR [10.244.1.0/24] 
	
	
	==> kube-apiserver [2738225576c8] <==
	I0408 19:52:16.798560       1 cache.go:32] Waiting for caches to sync for autoregister controller
	I0408 19:52:16.798567       1 cache.go:39] Caches are synced for autoregister controller
	I0408 19:52:16.820219       1 controller.go:615] quota admission added evaluator for: leases.coordination.k8s.io
	I0408 19:52:17.527655       1 storage_scheduling.go:95] created PriorityClass system-node-critical with value 2000001000
	I0408 19:52:17.540440       1 storage_scheduling.go:95] created PriorityClass system-cluster-critical with value 2000000000
	I0408 19:52:17.540538       1 storage_scheduling.go:111] all system priority classes are created successfully or already exist.
	I0408 19:52:18.780586       1 controller.go:615] quota admission added evaluator for: roles.rbac.authorization.k8s.io
	I0408 19:52:18.888578       1 controller.go:615] quota admission added evaluator for: rolebindings.rbac.authorization.k8s.io
	I0408 19:52:19.070368       1 alloc.go:330] "allocated clusterIPs" service="default/kubernetes" clusterIPs={"IPv4":"10.96.0.1"}
	W0408 19:52:19.112012       1 lease.go:265] Resetting endpoints for master service "kubernetes" to [172.22.37.202]
	I0408 19:52:19.114450       1 controller.go:615] quota admission added evaluator for: endpoints
	I0408 19:52:19.150802       1 controller.go:615] quota admission added evaluator for: endpointslices.discovery.k8s.io
	I0408 19:52:19.616179       1 controller.go:615] quota admission added evaluator for: serviceaccounts
	I0408 19:52:19.916751       1 controller.go:615] quota admission added evaluator for: deployments.apps
	I0408 19:52:19.971359       1 alloc.go:330] "allocated clusterIPs" service="kube-system/kube-dns" clusterIPs={"IPv4":"10.96.0.10"}
	I0408 19:52:19.993826       1 controller.go:615] quota admission added evaluator for: daemonsets.apps
	I0408 19:52:24.963413       1 controller.go:615] quota admission added evaluator for: replicasets.apps
	I0408 19:52:25.175029       1 controller.go:615] quota admission added evaluator for: controllerrevisions.apps
	E0408 20:08:59.828087       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59231: use of closed network connection
	E0408 20:09:00.905990       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59236: use of closed network connection
	E0408 20:09:01.943484       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59241: use of closed network connection
	E0408 20:09:40.196961       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59261: use of closed network connection
	E0408 20:09:50.729406       1 conn.go:339] Error on socket receive: read tcp 172.22.37.202:8443->172.22.32.1:59263: use of closed network connection
	E0408 20:13:27.732740       1 wrap.go:53] "Timeout or abort while handling" logger="UnhandledError" method="GET" URI="/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0" auditID="25aea89a-4448-4253-aeac-bde38ac93ff6"
	E0408 20:13:27.732687       1 timeout.go:140] "Post-timeout activity" logger="UnhandledError" timeElapsed="9.4µs" method="GET" path="/api/v1/services" result=null
	
	
	==> kube-controller-manager [27fb5b70d546] <==
	I0408 20:13:26.989485       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="41.6µs"
	I0408 20:13:27.001442       1 range_allocator.go:428] "Set node PodCIDR" logger="node-ipam-controller" node="multinode-095200-m03" podCIDRs=["10.244.1.0/24"]
	I0408 20:13:27.001547       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.001576       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.359144       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:27.950840       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:29.495284       1 node_lifecycle_controller.go:886] "Missing timestamp for Node. Assuming now as a timestamp" logger="node-lifecycle-controller" node="multinode-095200-m03"
	I0408 20:13:29.512314       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:37.387903       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:57.842537       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.730120       1 topologycache.go:237] "Can't get CPU or zone information for node" logger="endpointslice-controller" node="multinode-095200-m03"
	I0408 20:13:59.731192       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.755844       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:13:59.769950       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="121.201µs"
	I0408 20:13:59.789908       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="69.4µs"
	I0408 20:14:02.549158       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="14.149878ms"
	I0408 20:14:02.549975       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="90.9µs"
	I0408 20:14:04.522546       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:14:28.386936       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:16:28.461548       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200"
	I0408 20:18:04.595382       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:18:04.622639       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	I0408 20:18:04.629460       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="16.949694ms"
	I0408 20:18:04.630444       1 replica_set.go:679] "Finished syncing" logger="replicaset-controller" kind="ReplicaSet" key="default/busybox-58667487b6" duration="932.905µs"
	I0408 20:18:09.705437       1 range_allocator.go:247] "Successfully synced" logger="node-ipam-controller" key="multinode-095200-m03"
	
	
	==> kube-proxy [f5f5cd010e55] <==
		add table ip kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	E0408 19:52:28.734721       1 proxier.go:733] "Error cleaning up nftables rules" err=<
		could not run nftables command: /dev/stdin:1:1-25: Error: Could not process rule: Operation not supported
		add table ip6 kube-proxy
		^^^^^^^^^^^^^^^^^^^^^^^^^
	 >
	I0408 19:52:28.792628       1 server.go:698] "Successfully retrieved node IP(s)" IPs=["172.22.37.202"]
	E0408 19:52:28.792852       1 server.go:234] "Kube-proxy configuration may be incomplete or incorrect" err="nodePortAddresses is unset; NodePort connections will be accepted on all local IPs. Consider using `--nodeport-addresses primary`"
	I0408 19:52:28.861255       1 server_linux.go:147] "No iptables support for family" ipFamily="IPv6"
	I0408 19:52:28.861355       1 server.go:245] "kube-proxy running in single-stack mode" ipFamily="IPv4"
	I0408 19:52:28.861721       1 server_linux.go:170] "Using iptables Proxier"
	I0408 19:52:28.867011       1 proxier.go:255] "Setting route_localnet=1 to allow node-ports on localhost; to change this either disable iptables.localhostNodePorts (--iptables-localhost-nodeports) or set nodePortAddresses (--nodeport-addresses) to filter loopback addresses" ipFamily="IPv4"
	I0408 19:52:28.869309       1 server.go:497] "Version info" version="v1.32.2"
	I0408 19:52:28.869521       1 server.go:499] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
	I0408 19:52:28.875177       1 config.go:199] "Starting service config controller"
	I0408 19:52:28.875549       1 config.go:105] "Starting endpoint slice config controller"
	I0408 19:52:28.876936       1 shared_informer.go:313] Waiting for caches to sync for service config
	I0408 19:52:28.877198       1 shared_informer.go:313] Waiting for caches to sync for endpoint slice config
	I0408 19:52:28.880296       1 config.go:329] "Starting node config controller"
	I0408 19:52:28.880469       1 shared_informer.go:313] Waiting for caches to sync for node config
	I0408 19:52:28.978261       1 shared_informer.go:320] Caches are synced for service config
	I0408 19:52:28.979808       1 shared_informer.go:320] Caches are synced for endpoint slice config
	I0408 19:52:28.980711       1 shared_informer.go:320] Caches are synced for node config
	
	
	==> kube-scheduler [f825a4ac0fd1] <==
	W0408 19:52:17.123617       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123643       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123709       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.123749       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123806       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:17.123844       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.123907       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User "system:kube-scheduler" cannot list resource "replicasets" in API group "apps" at the cluster scope
	E0408 19:52:17.123945       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicaSet: failed to list *v1.ReplicaSet: replicasets.apps is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicasets\" in API group \"apps\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.969351       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "csistoragecapacities" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.969409       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIStorageCapacity: failed to list *v1.CSIStorageCapacity: csistoragecapacities.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"csistoragecapacities\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:17.990123       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "volumeattachments" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:17.990233       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.VolumeAttachment: failed to list *v1.VolumeAttachment: volumeattachments.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"volumeattachments\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.047122       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumes" in API group "" at the cluster scope
	E0408 19:52:18.049011       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolume: failed to list *v1.PersistentVolume: persistentvolumes is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumes\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.256615       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User "system:kube-scheduler" cannot list resource "replicationcontrollers" in API group "" at the cluster scope
	E0408 19:52:18.256846       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.ReplicationController: failed to list *v1.ReplicationController: replicationcontrollers is forbidden: User \"system:kube-scheduler\" cannot list resource \"replicationcontrollers\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.309190       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User "system:kube-scheduler" cannot list resource "persistentvolumeclaims" in API group "" at the cluster scope
	E0408 19:52:18.309310       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.PersistentVolumeClaim: failed to list *v1.PersistentVolumeClaim: persistentvolumeclaims is forbidden: User \"system:kube-scheduler\" cannot list resource \"persistentvolumeclaims\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.324559       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:kube-scheduler" cannot list resource "services" in API group "" at the cluster scope
	E0408 19:52:18.324673       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:kube-scheduler\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.346509       1 reflector.go:569] k8s.io/client-go/informers/factory.go:160: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User "system:kube-scheduler" cannot list resource "storageclasses" in API group "storage.k8s.io" at the cluster scope
	E0408 19:52:18.346574       1 reflector.go:166] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.StorageClass: failed to list *v1.StorageClass: storageclasses.storage.k8s.io is forbidden: User \"system:kube-scheduler\" cannot list resource \"storageclasses\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError"
	W0408 19:52:18.630731       1 reflector.go:569] runtime/asm_amd64.s:1700: failed to list *v1.ConfigMap: configmaps "extension-apiserver-authentication" is forbidden: User "system:kube-scheduler" cannot list resource "configmaps" in API group "" in the namespace "kube-system"
	E0408 19:52:18.630980       1 reflector.go:166] "Unhandled Error" err="runtime/asm_amd64.s:1700: Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"extension-apiserver-authentication\" is forbidden: User \"system:kube-scheduler\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"kube-system\"" logger="UnhandledError"
	I0408 19:52:20.300510       1 shared_informer.go:320] Caches are synced for client-ca::kube-system::extension-apiserver-authentication::client-ca-file
	
	
	==> kubelet <==
	Apr 08 20:14:20 multinode-095200 kubelet[2271]: E0408 20:14:20.020995    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:14:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:15:20 multinode-095200 kubelet[2271]: E0408 20:15:20.021786    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:15:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:16:20 multinode-095200 kubelet[2271]: E0408 20:16:20.020745    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:16:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:17:20 multinode-095200 kubelet[2271]: E0408 20:17:20.021313    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:17:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:17:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:17:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:17:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	Apr 08 20:18:20 multinode-095200 kubelet[2271]: E0408 20:18:20.021766    2271 iptables.go:577] "Could not set up iptables canary" err=<
	Apr 08 20:18:20 multinode-095200 kubelet[2271]:         error creating chain "KUBE-KUBELET-CANARY": exit status 3: Ignoring deprecated --wait-interval option.
	Apr 08 20:18:20 multinode-095200 kubelet[2271]:         ip6tables v1.8.9 (legacy): can't initialize ip6tables table `nat': Table does not exist (do you need to insmod?)
	Apr 08 20:18:20 multinode-095200 kubelet[2271]:         Perhaps ip6tables or your kernel needs to be upgraded.
	Apr 08 20:18:20 multinode-095200 kubelet[2271]:  > table="nat" chain="KUBE-KUBELET-CANARY"
	

                                                
                                                
-- /stdout --
helpers_test.go:254: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200
helpers_test.go:254: (dbg) Done: out/minikube-windows-amd64.exe status --format={{.APIServer}} -p multinode-095200 -n multinode-095200: (12.3139994s)
helpers_test.go:261: (dbg) Run:  kubectl --context multinode-095200 get po -o=jsonpath={.items[*].metadata.name} -A --field-selector=status.phase!=Running
helpers_test.go:285: <<< TestMultiNode/serial/StopNode FAILED: end of post-mortem logs <<<
helpers_test.go:286: ---------------------/post-mortem---------------------------------
--- FAIL: TestMultiNode/serial/StopNode (123.88s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartWithK8s (299.9s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartWithK8s
no_kubernetes_test.go:95: (dbg) Run:  out/minikube-windows-amd64.exe start -p NoKubernetes-632800 --driver=hyperv
E0408 20:36:52.285887    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
no_kubernetes_test.go:95: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p NoKubernetes-632800 --driver=hyperv: exit status 1 (4m59.6110293s)

                                                
                                                
-- stdout --
	* [NoKubernetes-632800] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	* Using the hyperv driver based on user configuration
	* Starting "NoKubernetes-632800" primary control-plane node in "NoKubernetes-632800" cluster
	* Creating hyperv VM (CPUs=2, Memory=6000MB, Disk=20000MB) ...

                                                
                                                
-- /stdout --
no_kubernetes_test.go:97: failed to start minikube with args: "out/minikube-windows-amd64.exe start -p NoKubernetes-632800 --driver=hyperv" : exit status 1
helpers_test.go:222: -----------------------post-mortem--------------------------------
helpers_test.go:239: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p NoKubernetes-632800 -n NoKubernetes-632800
helpers_test.go:239: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p NoKubernetes-632800 -n NoKubernetes-632800: exit status 7 (283.901ms)

                                                
                                                
-- stdout --
	Nonexistent

                                                
                                                
-- /stdout --
helpers_test.go:239: status error: exit status 7 (may be ok)
helpers_test.go:241: "NoKubernetes-632800" host is not running, skipping log retrieval (state="Nonexistent")
--- FAIL: TestNoKubernetes/serial/StartWithK8s (299.90s)

                                                
                                    

Test pass (123/193)

Order passed test Duration
3 TestDownloadOnly/v1.20.0/json-events 19.01
4 TestDownloadOnly/v1.20.0/preload-exists 0.09
7 TestDownloadOnly/v1.20.0/kubectl 0
8 TestDownloadOnly/v1.20.0/LogsDuration 0.3
9 TestDownloadOnly/v1.20.0/DeleteAll 0.82
10 TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds 0.72
12 TestDownloadOnly/v1.32.2/json-events 13.82
13 TestDownloadOnly/v1.32.2/preload-exists 0
16 TestDownloadOnly/v1.32.2/kubectl 0
17 TestDownloadOnly/v1.32.2/LogsDuration 0.43
18 TestDownloadOnly/v1.32.2/DeleteAll 0.68
19 TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds 0.79
21 TestBinaryMirror 9.84
22 TestOffline 258.36
25 TestAddons/PreSetup/EnablingAddonOnNonExistingCluster 0.3
26 TestAddons/PreSetup/DisablingAddonOnNonExistingCluster 0.28
27 TestAddons/Setup 441.52
29 TestAddons/serial/Volcano 66.05
31 TestAddons/serial/GCPAuth/Namespaces 0.36
32 TestAddons/serial/GCPAuth/FakeCredentials 10.63
35 TestAddons/parallel/Registry 37.62
36 TestAddons/parallel/Ingress 67.44
37 TestAddons/parallel/InspektorGadget 27.42
38 TestAddons/parallel/MetricsServer 23.74
40 TestAddons/parallel/CSI 85.26
41 TestAddons/parallel/Headlamp 50.25
42 TestAddons/parallel/CloudSpanner 22.27
43 TestAddons/parallel/LocalPath 46.82
44 TestAddons/parallel/NvidiaDevicePlugin 23.09
45 TestAddons/parallel/Yakd 27.94
47 TestAddons/StoppedEnableDisable 53.78
51 TestForceSystemdFlag 405.81
52 TestForceSystemdEnv 552.52
59 TestErrorSpam/start 17.39
60 TestErrorSpam/status 37.69
61 TestErrorSpam/pause 23.58
62 TestErrorSpam/unpause 23.67
63 TestErrorSpam/stop 63.21
66 TestFunctional/serial/CopySyncFile 0.05
67 TestFunctional/serial/StartWithProxy 232.32
68 TestFunctional/serial/AuditLog 0
69 TestFunctional/serial/SoftStart 129.05
70 TestFunctional/serial/KubeContext 0.13
71 TestFunctional/serial/KubectlGetPods 0.24
74 TestFunctional/serial/CacheCmd/cache/add_remote 26.73
75 TestFunctional/serial/CacheCmd/cache/add_local 10.73
76 TestFunctional/serial/CacheCmd/cache/CacheDelete 0.27
77 TestFunctional/serial/CacheCmd/cache/list 0.27
78 TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node 9.62
79 TestFunctional/serial/CacheCmd/cache/cache_reload 37.09
80 TestFunctional/serial/CacheCmd/cache/delete 0.54
81 TestFunctional/serial/MinikubeKubectlCmd 0.52
82 TestFunctional/serial/MinikubeKubectlCmdDirectly 3.01
85 TestFunctional/serial/LogsCmd 168.2
86 TestFunctional/serial/LogsFileCmd 180.75
89 TestFunctional/parallel/ConfigCmd 1.84
98 TestFunctional/parallel/AddonsCmd 0.62
101 TestFunctional/parallel/SSHCmd 19.21
102 TestFunctional/parallel/CpCmd 62.45
104 TestFunctional/parallel/FileSync 9.43
105 TestFunctional/parallel/CertSync 57.94
111 TestFunctional/parallel/NonActiveRuntimeDisabled 9.6
113 TestFunctional/parallel/License 1.61
115 TestFunctional/parallel/ProfileCmd/profile_not_create 15.17
118 TestFunctional/parallel/ProfileCmd/profile_list 14.87
121 TestFunctional/parallel/ProfileCmd/profile_json_output 14.35
125 TestFunctional/parallel/TunnelCmd/serial/StartTunnel 0.01
132 TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel 0.11
133 TestFunctional/parallel/Version/short 0.27
134 TestFunctional/parallel/Version/components 8.14
140 TestFunctional/parallel/ImageCommands/Setup 2.04
146 TestFunctional/parallel/ImageCommands/ImageRemove 120.43
147 TestFunctional/parallel/UpdateContextCmd/no_changes 2.57
148 TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster 2.59
149 TestFunctional/parallel/UpdateContextCmd/no_clusters 2.65
151 TestFunctional/parallel/ImageCommands/ImageSaveDaemon 60.23
152 TestFunctional/delete_echo-server_images 0.2
153 TestFunctional/delete_my-image_image 0.1
154 TestFunctional/delete_minikube_cached_images 0.09
159 TestMultiControlPlane/serial/StartCluster 727.03
160 TestMultiControlPlane/serial/DeployApp 14.41
162 TestMultiControlPlane/serial/AddWorkerNode 272.26
163 TestMultiControlPlane/serial/NodeLabels 0.19
164 TestMultiControlPlane/serial/HAppyAfterClusterStart 50.05
165 TestMultiControlPlane/serial/CopyFile 653.38
169 TestImageBuild/serial/Setup 200.14
170 TestImageBuild/serial/NormalBuild 10.75
171 TestImageBuild/serial/BuildWithBuildArg 8.97
172 TestImageBuild/serial/BuildWithDockerIgnore 8.24
173 TestImageBuild/serial/BuildWithSpecifiedDockerfile 8.3
177 TestJSONOutput/start/Command 233.95
178 TestJSONOutput/start/Audit 0
180 TestJSONOutput/start/parallel/DistinctCurrentSteps 0
181 TestJSONOutput/start/parallel/IncreasingCurrentSteps 0
183 TestJSONOutput/pause/Command 8.19
184 TestJSONOutput/pause/Audit 0
186 TestJSONOutput/pause/parallel/DistinctCurrentSteps 0
187 TestJSONOutput/pause/parallel/IncreasingCurrentSteps 0
189 TestJSONOutput/unpause/Command 8.05
190 TestJSONOutput/unpause/Audit 0
192 TestJSONOutput/unpause/parallel/DistinctCurrentSteps 0
193 TestJSONOutput/unpause/parallel/IncreasingCurrentSteps 0
195 TestJSONOutput/stop/Command 35.25
196 TestJSONOutput/stop/Audit 0
198 TestJSONOutput/stop/parallel/DistinctCurrentSteps 0
199 TestJSONOutput/stop/parallel/IncreasingCurrentSteps 0
200 TestErrorJSONOutput 1
205 TestMainNoArgs 0.24
206 TestMinikubeProfile 538.04
209 TestMountStart/serial/StartWithMountFirst 158.67
210 TestMountStart/serial/VerifyMountFirst 9.84
211 TestMountStart/serial/StartWithMountSecond 158.53
212 TestMountStart/serial/VerifyMountSecond 9.89
213 TestMountStart/serial/DeleteFirst 28.08
214 TestMountStart/serial/VerifyMountPostDelete 10.32
215 TestMountStart/serial/Stop 32.38
216 TestMountStart/serial/RestartStopped 129.31
217 TestMountStart/serial/VerifyMountPostStop 10.36
224 TestMultiNode/serial/MultiNodeLabels 0.18
225 TestMultiNode/serial/ProfileList 36.55
231 TestPreload 565.77
232 TestScheduledStopWindows 332.97
242 TestNoKubernetes/serial/StartNoK8sWithVersion 0.43
x
+
TestDownloadOnly/v1.20.0/json-events (19.01s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-windows-amd64.exe start -o=json --download-only -p download-only-867200 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=docker --driver=hyperv
aaa_download_only_test.go:81: (dbg) Done: out/minikube-windows-amd64.exe start -o=json --download-only -p download-only-867200 --force --alsologtostderr --kubernetes-version=v1.20.0 --container-runtime=docker --driver=hyperv: (19.0135342s)
--- PASS: TestDownloadOnly/v1.20.0/json-events (19.01s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/preload-exists (0.09s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/preload-exists
I0408 17:49:01.507932    7904 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime docker
I0408 17:49:01.593305    7904 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4
--- PASS: TestDownloadOnly/v1.20.0/preload-exists (0.09s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/kubectl (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/kubectl
--- PASS: TestDownloadOnly/v1.20.0/kubectl (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/LogsDuration (0.3s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-windows-amd64.exe logs -p download-only-867200
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-windows-amd64.exe logs -p download-only-867200: exit status 85 (294.5229ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|----------|
	| Command |              Args              |       Profile        |       User        | Version |     Start Time      | End Time |
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|----------|
	| start   | -o=json --download-only        | download-only-867200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 17:48 UTC |          |
	|         | -p download-only-867200        |                      |                   |         |                     |          |
	|         | --force --alsologtostderr      |                      |                   |         |                     |          |
	|         | --kubernetes-version=v1.20.0   |                      |                   |         |                     |          |
	|         | --container-runtime=docker     |                      |                   |         |                     |          |
	|         | --driver=hyperv                |                      |                   |         |                     |          |
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|----------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 17:48:42
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 17:48:42.603230   12728 out.go:345] Setting OutFile to fd 716 ...
	I0408 17:48:42.679227   12728 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 17:48:42.679227   12728 out.go:358] Setting ErrFile to fd 720...
	I0408 17:48:42.679227   12728 out.go:392] TERM=,COLORTERM=, which probably does not support color
	W0408 17:48:42.697239   12728 root.go:314] Error reading config file at C:\Users\jenkins.minikube3\minikube-integration\.minikube\config\config.json: open C:\Users\jenkins.minikube3\minikube-integration\.minikube\config\config.json: The system cannot find the path specified.
	I0408 17:48:42.707238   12728 out.go:352] Setting JSON to true
	I0408 17:48:42.711230   12728 start.go:129] hostinfo: {"hostname":"minikube3","uptime":95308,"bootTime":1744039214,"procs":176,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 17:48:42.711230   12728 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 17:48:42.717358   12728 out.go:97] [download-only-867200] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 17:48:42.717358   12728 notify.go:220] Checking for updates...
	W0408 17:48:42.717358   12728 preload.go:293] Failed to list preload files: open C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball: The system cannot find the file specified.
	I0408 17:48:42.720233   12728 out.go:169] KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 17:48:42.723236   12728 out.go:169] MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 17:48:42.725230   12728 out.go:169] MINIKUBE_LOCATION=20604
	I0408 17:48:42.728256   12728 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	W0408 17:48:42.735228   12728 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0408 17:48:42.736239   12728 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 17:48:48.469341   12728 out.go:97] Using the hyperv driver based on user configuration
	I0408 17:48:48.469341   12728 start.go:297] selected driver: hyperv
	I0408 17:48:48.469341   12728 start.go:901] validating driver "hyperv" against <nil>
	I0408 17:48:48.470096   12728 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 17:48:48.524929   12728 start_flags.go:393] Using suggested 6000MB memory alloc based on sys=65534MB, container=0MB
	I0408 17:48:48.526113   12728 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0408 17:48:48.526113   12728 cni.go:84] Creating CNI manager for ""
	I0408 17:48:48.527167   12728 cni.go:162] CNI unnecessary in this configuration, recommending no CNI
	I0408 17:48:48.527167   12728 start.go:340] cluster config:
	{Name:download-only-867200 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:6000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.20.0 ClusterName:download-only-867200 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Co
ntainerRuntime:docker CRISocket: NetworkPlugin: FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.20.0 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 17:48:48.528477   12728 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 17:48:48.533620   12728 out.go:97] Downloading VM boot image ...
	I0408 17:48:48.533620   12728 download.go:108] Downloading: https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso?checksum=file:https://storage.googleapis.com/minikube/iso/minikube-v1.35.0-amd64.iso.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\iso\amd64\minikube-v1.35.0-amd64.iso
	I0408 17:48:52.448532   12728 out.go:97] Starting "download-only-867200" primary control-plane node in "download-only-867200" cluster
	I0408 17:48:52.448532   12728 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime docker
	I0408 17:48:52.497856   12728 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4
	I0408 17:48:52.498069   12728 cache.go:56] Caching tarball of preloaded images
	I0408 17:48:52.498153   12728 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime docker
	I0408 17:48:52.501841   12728 out.go:97] Downloading Kubernetes v1.20.0 preload ...
	I0408 17:48:52.501841   12728 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:48:52.582917   12728 download.go:108] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.20.0/preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4?checksum=md5:9a82241e9b8b4ad2b5cca73108f2c7a3 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4
	I0408 17:48:55.497638   12728 preload.go:247] saving checksum for preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:48:55.498467   12728 preload.go:254] verifying checksum of C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.20.0-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:48:56.527403   12728 cache.go:59] Finished verifying existence of preloaded tar for v1.20.0 on docker
	I0408 17:48:56.527403   12728 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\download-only-867200\config.json ...
	I0408 17:48:56.527403   12728 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\download-only-867200\config.json: {Name:mkd1ae10e06d40bb03c9f66f0f94a4e64fc22873 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 17:48:56.529925   12728 preload.go:131] Checking if preload exists for k8s version v1.20.0 and runtime docker
	I0408 17:48:56.531665   12728 download.go:108] Downloading: https://dl.k8s.io/release/v1.20.0/bin/windows/amd64/kubectl.exe?checksum=file:https://dl.k8s.io/release/v1.20.0/bin/windows/amd64/kubectl.exe.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\windows\amd64\v1.20.0/kubectl.exe
	
	
	* The control-plane node download-only-867200 host does not exist
	  To start a cluster, run: "minikube start -p download-only-867200"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.20.0/LogsDuration (0.30s)

                                                
                                    
----
TestDownloadOnly/v1.20.0/DeleteAll (0.82s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-windows-amd64.exe delete --all
--- PASS: TestDownloadOnly/v1.20.0/DeleteAll (0.82s)

                                                
                                    
----
TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.72s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-windows-amd64.exe delete -p download-only-867200
--- PASS: TestDownloadOnly/v1.20.0/DeleteAlwaysSucceeds (0.72s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/json-events (13.82s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/json-events
aaa_download_only_test.go:81: (dbg) Run:  out/minikube-windows-amd64.exe start -o=json --download-only -p download-only-785900 --force --alsologtostderr --kubernetes-version=v1.32.2 --container-runtime=docker --driver=hyperv
aaa_download_only_test.go:81: (dbg) Done: out/minikube-windows-amd64.exe start -o=json --download-only -p download-only-785900 --force --alsologtostderr --kubernetes-version=v1.32.2 --container-runtime=docker --driver=hyperv: (13.8197319s)
--- PASS: TestDownloadOnly/v1.32.2/json-events (13.82s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/preload-exists (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/preload-exists
I0408 17:49:17.246793    7904 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
I0408 17:49:17.247244    7904 preload.go:146] Found local preload: C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
--- PASS: TestDownloadOnly/v1.32.2/preload-exists (0.00s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/kubectl (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/kubectl
--- PASS: TestDownloadOnly/v1.32.2/kubectl (0.00s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/LogsDuration (0.43s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/LogsDuration
aaa_download_only_test.go:184: (dbg) Run:  out/minikube-windows-amd64.exe logs -p download-only-785900
aaa_download_only_test.go:184: (dbg) Non-zero exit: out/minikube-windows-amd64.exe logs -p download-only-785900: exit status 85 (429.272ms)

                                                
                                                
-- stdout --
	
	==> Audit <==
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	| Command |              Args              |       Profile        |       User        | Version |     Start Time      |      End Time       |
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	| start   | -o=json --download-only        | download-only-867200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 17:48 UTC |                     |
	|         | -p download-only-867200        |                      |                   |         |                     |                     |
	|         | --force --alsologtostderr      |                      |                   |         |                     |                     |
	|         | --kubernetes-version=v1.20.0   |                      |                   |         |                     |                     |
	|         | --container-runtime=docker     |                      |                   |         |                     |                     |
	|         | --driver=hyperv                |                      |                   |         |                     |                     |
	| delete  | --all                          | minikube             | minikube3\jenkins | v1.35.0 | 08 Apr 25 17:49 UTC | 08 Apr 25 17:49 UTC |
	| delete  | -p download-only-867200        | download-only-867200 | minikube3\jenkins | v1.35.0 | 08 Apr 25 17:49 UTC | 08 Apr 25 17:49 UTC |
	| start   | -o=json --download-only        | download-only-785900 | minikube3\jenkins | v1.35.0 | 08 Apr 25 17:49 UTC |                     |
	|         | -p download-only-785900        |                      |                   |         |                     |                     |
	|         | --force --alsologtostderr      |                      |                   |         |                     |                     |
	|         | --kubernetes-version=v1.32.2   |                      |                   |         |                     |                     |
	|         | --container-runtime=docker     |                      |                   |         |                     |                     |
	|         | --driver=hyperv                |                      |                   |         |                     |                     |
	|---------|--------------------------------|----------------------|-------------------|---------|---------------------|---------------------|
	
	
	==> Last Start <==
	Log file created at: 2025/04/08 17:49:03
	Running on machine: minikube3
	Binary: Built with gc go1.24.0 for windows/amd64
	Log line format: [IWEF]mmdd hh:mm:ss.uuuuuu threadid file:line] msg
	I0408 17:49:03.539514    8288 out.go:345] Setting OutFile to fd 576 ...
	I0408 17:49:03.615367    8288 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 17:49:03.615367    8288 out.go:358] Setting ErrFile to fd 724...
	I0408 17:49:03.615367    8288 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 17:49:03.634584    8288 out.go:352] Setting JSON to true
	I0408 17:49:03.637832    8288 start.go:129] hostinfo: {"hostname":"minikube3","uptime":95329,"bootTime":1744039214,"procs":176,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 17:49:03.637832    8288 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 17:49:03.642832    8288 out.go:97] [download-only-785900] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 17:49:03.643879    8288 notify.go:220] Checking for updates...
	I0408 17:49:03.646290    8288 out.go:169] KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 17:49:03.649643    8288 out.go:169] MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 17:49:03.652056    8288 out.go:169] MINIKUBE_LOCATION=20604
	I0408 17:49:03.655662    8288 out.go:169] MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	W0408 17:49:03.661669    8288 out.go:321] minikube skips various validations when --force is supplied; this may lead to unexpected behavior
	I0408 17:49:03.661669    8288 driver.go:394] Setting default libvirt URI to qemu:///system
	I0408 17:49:09.179793    8288 out.go:97] Using the hyperv driver based on user configuration
	I0408 17:49:09.179917    8288 start.go:297] selected driver: hyperv
	I0408 17:49:09.179917    8288 start.go:901] validating driver "hyperv" against <nil>
	I0408 17:49:09.180008    8288 start_flags.go:310] no existing cluster config was found, will generate one from the flags 
	I0408 17:49:09.234046    8288 start_flags.go:393] Using suggested 6000MB memory alloc based on sys=65534MB, container=0MB
	I0408 17:49:09.234771    8288 start_flags.go:929] Wait components to verify : map[apiserver:true system_pods:true]
	I0408 17:49:09.234771    8288 cni.go:84] Creating CNI manager for ""
	I0408 17:49:09.234771    8288 cni.go:158] "hyperv" driver + "docker" container runtime found on kubernetes v1.24+, recommending bridge
	I0408 17:49:09.234771    8288 start_flags.go:319] Found "bridge CNI" CNI - setting NetworkPlugin=cni
	I0408 17:49:09.235789    8288 start.go:340] cluster config:
	{Name:download-only-785900 KeepContext:false EmbedCerts:false MinikubeISO: KicBaseImage:gcr.io/k8s-minikube/kicbase-builds:v0.0.46-1744107393-20604@sha256:2430533582a8c08f907b2d5976c79bd2e672b4f3d4484088c99b839f3175ed6a Memory:6000 CPUs:2 DiskSize:20000 Driver:hyperv HyperkitVpnKitSock: HyperkitVSockPorts:[] DockerEnv:[] ContainerVolumeMounts:[] InsecureRegistry:[] RegistryMirror:[] HostOnlyCIDR:192.168.59.1/24 HypervVirtualSwitch: HypervUseExternalSwitch:false HypervExternalAdapter: KVMNetwork:default KVMQemuURI:qemu:///system KVMGPU:false KVMHidden:false KVMNUMACount:1 APIServerPort:8443 DockerOpt:[] DisableDriverMounts:false NFSShare:[] NFSSharesRoot:/nfsshares UUID: NoVTXCheck:false DNSProxy:false HostDNSResolver:true HostOnlyNicType:virtio NatNicType:virtio SSHIPAddress: SSHUser:root SSHKey: SSHPort:22 KubernetesConfig:{KubernetesVersion:v1.32.2 ClusterName:download-only-785900 Namespace:default APIServerHAVIP: APIServerName:minikubeCA APIServerNames:[] APIServerIPs:[] DNSDomain:cluster.local Co
ntainerRuntime:docker CRISocket: NetworkPlugin:cni FeatureGates: ServiceCIDR:10.96.0.0/12 ImageRepository: LoadBalancerStartIP: LoadBalancerEndIP: CustomIngressCert: RegistryAliases: ExtraOptions:[] ShouldLoadCachedImages:true EnableDefaultCNI:false CNI:} Nodes:[{Name: IP: Port:8443 KubernetesVersion:v1.32.2 ContainerRuntime:docker ControlPlane:true Worker:true}] Addons:map[] CustomAddonImages:map[] CustomAddonRegistries:map[] VerifyComponents:map[apiserver:true system_pods:true] StartHostTimeout:6m0s ScheduledStop:<nil> ExposedPorts:[] ListenAddress: Network: Subnet: MultiNodeRequested:false ExtraDisks:0 CertExpiration:26280h0m0s Mount:false MountString:C:\Users\jenkins.minikube3:/minikube-host Mount9PVersion:9p2000.L MountGID:docker MountIP: MountMSize:262144 MountOptions:[] MountPort:0 MountType:9p MountUID:docker BinaryMirror: DisableOptimizations:false DisableMetrics:false CustomQemuFirmwarePath: SocketVMnetClientPath: SocketVMnetPath: StaticIP: SSHAuthSock: SSHAgentPID:0 GPUs: AutoPauseInterval:1m0s}
	I0408 17:49:09.236066    8288 iso.go:125] acquiring lock: {Name:mk99bbb6a54210c1995fdf151b41c83b57c3735b Clock:{} Delay:500ms Timeout:10m0s Cancel:<nil>}
	I0408 17:49:09.241433    8288 out.go:97] Starting "download-only-785900" primary control-plane node in "download-only-785900" cluster
	I0408 17:49:09.241433    8288 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 17:49:09.290376    8288 preload.go:118] Found remote preload: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.32.2/preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 17:49:09.290376    8288 cache.go:56] Caching tarball of preloaded images
	I0408 17:49:09.291105    8288 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 17:49:09.294571    8288 out.go:97] Downloading Kubernetes v1.32.2 preload ...
	I0408 17:49:09.294663    8288 preload.go:236] getting checksum for preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:49:09.356558    8288 download.go:108] Downloading: https://storage.googleapis.com/minikube-preloaded-volume-tarballs/v18/v1.32.2/preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4?checksum=md5:c3fdd273d8c9002513e1c87be8fe9ffc -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4
	I0408 17:49:12.316678    8288 preload.go:247] saving checksum for preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:49:12.318176    8288 preload.go:254] verifying checksum of C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\preloaded-tarball\preloaded-images-k8s-v18-v1.32.2-docker-overlay2-amd64.tar.lz4 ...
	I0408 17:49:13.211844    8288 cache.go:59] Finished verifying existence of preloaded tar for v1.32.2 on docker
	I0408 17:49:13.213047    8288 profile.go:143] Saving config to C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\download-only-785900\config.json ...
	I0408 17:49:13.213047    8288 lock.go:35] WriteFile acquiring C:\Users\jenkins.minikube3\minikube-integration\.minikube\profiles\download-only-785900\config.json: {Name:mk79f2e8ebd1125ef0dcfaf8efef7d57bdc89a33 Clock:{} Delay:500ms Timeout:1m0s Cancel:<nil>}
	I0408 17:49:13.214777    8288 preload.go:131] Checking if preload exists for k8s version v1.32.2 and runtime docker
	I0408 17:49:13.215216    8288 download.go:108] Downloading: https://dl.k8s.io/release/v1.32.2/bin/windows/amd64/kubectl.exe?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/windows/amd64/kubectl.exe.sha256 -> C:\Users\jenkins.minikube3\minikube-integration\.minikube\cache\windows\amd64\v1.32.2/kubectl.exe
	
	
	* The control-plane node download-only-785900 host does not exist
	  To start a cluster, run: "minikube start -p download-only-785900"

                                                
                                                
-- /stdout --
aaa_download_only_test.go:185: minikube logs failed with error: exit status 85
--- PASS: TestDownloadOnly/v1.32.2/LogsDuration (0.43s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/DeleteAll (0.68s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/DeleteAll
aaa_download_only_test.go:197: (dbg) Run:  out/minikube-windows-amd64.exe delete --all
--- PASS: TestDownloadOnly/v1.32.2/DeleteAll (0.68s)

                                                
                                    
----
TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds (0.79s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds
aaa_download_only_test.go:208: (dbg) Run:  out/minikube-windows-amd64.exe delete -p download-only-785900
--- PASS: TestDownloadOnly/v1.32.2/DeleteAlwaysSucceeds (0.79s)

                                                
                                    
----
TestBinaryMirror (9.84s)

                                                
                                                
=== RUN   TestBinaryMirror
I0408 17:49:20.542545    7904 binary.go:74] Not caching binary, using https://dl.k8s.io/release/v1.32.2/bin/windows/amd64/kubectl.exe?checksum=file:https://dl.k8s.io/release/v1.32.2/bin/windows/amd64/kubectl.exe.sha256
aaa_download_only_test.go:314: (dbg) Run:  out/minikube-windows-amd64.exe start --download-only -p binary-mirror-502300 --alsologtostderr --binary-mirror http://127.0.0.1:56527 --driver=hyperv
aaa_download_only_test.go:314: (dbg) Done: out/minikube-windows-amd64.exe start --download-only -p binary-mirror-502300 --alsologtostderr --binary-mirror http://127.0.0.1:56527 --driver=hyperv: (9.1477682s)
helpers_test.go:175: Cleaning up "binary-mirror-502300" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p binary-mirror-502300
--- PASS: TestBinaryMirror (9.84s)

                                                
                                    
----
TestOffline (258.36s)

                                                
                                                
=== RUN   TestOffline
=== PAUSE TestOffline

                                                
                                                

                                                
                                                
=== CONT  TestOffline
aab_offline_test.go:55: (dbg) Run:  out/minikube-windows-amd64.exe start -p offline-docker-632800 --alsologtostderr -v=1 --memory=2048 --wait=true --driver=hyperv
aab_offline_test.go:55: (dbg) Done: out/minikube-windows-amd64.exe start -p offline-docker-632800 --alsologtostderr -v=1 --memory=2048 --wait=true --driver=hyperv: (3m32.0514019s)
helpers_test.go:175: Cleaning up "offline-docker-632800" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p offline-docker-632800
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p offline-docker-632800: (46.3087958s)
--- PASS: TestOffline (258.36s)

                                                
                                    
----
TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.3s)

                                                
                                                
=== RUN   TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/EnablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/EnablingAddonOnNonExistingCluster
addons_test.go:939: (dbg) Run:  out/minikube-windows-amd64.exe addons enable dashboard -p addons-847800
addons_test.go:939: (dbg) Non-zero exit: out/minikube-windows-amd64.exe addons enable dashboard -p addons-847800: exit status 85 (295.5233ms)

                                                
                                                
-- stdout --
	* Profile "addons-847800" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-847800"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/EnablingAddonOnNonExistingCluster (0.30s)

                                                
                                    
----
TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.28s)

                                                
                                                
=== RUN   TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
=== PAUSE TestAddons/PreSetup/DisablingAddonOnNonExistingCluster

                                                
                                                

                                                
                                                
=== CONT  TestAddons/PreSetup/DisablingAddonOnNonExistingCluster
addons_test.go:950: (dbg) Run:  out/minikube-windows-amd64.exe addons disable dashboard -p addons-847800
addons_test.go:950: (dbg) Non-zero exit: out/minikube-windows-amd64.exe addons disable dashboard -p addons-847800: exit status 85 (281.4182ms)

                                                
                                                
-- stdout --
	* Profile "addons-847800" not found. Run "minikube profile list" to view all profiles.
	  To start a cluster, run: "minikube start -p addons-847800"

                                                
                                                
-- /stdout --
--- PASS: TestAddons/PreSetup/DisablingAddonOnNonExistingCluster (0.28s)

                                                
                                    
----
TestAddons/Setup (441.52s)

                                                
                                                
=== RUN   TestAddons/Setup
addons_test.go:107: (dbg) Run:  out/minikube-windows-amd64.exe start -p addons-847800 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=hyperv --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher
addons_test.go:107: (dbg) Done: out/minikube-windows-amd64.exe start -p addons-847800 --wait=true --memory=4000 --alsologtostderr --addons=registry --addons=metrics-server --addons=volumesnapshots --addons=csi-hostpath-driver --addons=gcp-auth --addons=cloud-spanner --addons=inspektor-gadget --addons=nvidia-device-plugin --addons=yakd --addons=volcano --addons=amd-gpu-device-plugin --driver=hyperv --addons=ingress --addons=ingress-dns --addons=storage-provisioner-rancher: (7m21.5189514s)
--- PASS: TestAddons/Setup (441.52s)

                                                
                                    
----
TestAddons/serial/Volcano (66.05s)

                                                
                                                
=== RUN   TestAddons/serial/Volcano
addons_test.go:823: volcano-controller stabilized in 18.7954ms
addons_test.go:815: volcano-admission stabilized in 18.9285ms
addons_test.go:807: volcano-scheduler stabilized in 19.2288ms
addons_test.go:829: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-scheduler" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-scheduler-75fdd99bcf-2hss9" [4d1ca53f-3f53-4108-8864-2cdc70891513] Running
addons_test.go:829: (dbg) TestAddons/serial/Volcano: app=volcano-scheduler healthy within 5.0057583s
addons_test.go:833: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-admission" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-admission-75d8f6b5c-6c58z" [55f97e3e-b667-4eb1-8d41-b83056d1ff7d] Running
addons_test.go:833: (dbg) TestAddons/serial/Volcano: app=volcano-admission healthy within 5.0070003s
addons_test.go:837: (dbg) TestAddons/serial/Volcano: waiting 6m0s for pods matching "app=volcano-controller" in namespace "volcano-system" ...
helpers_test.go:344: "volcano-controllers-86bdc5c9c-q69k7" [04d00821-2d91-4d36-9596-69c45a35de1a] Running
addons_test.go:837: (dbg) TestAddons/serial/Volcano: app=volcano-controller healthy within 6.0076057s
addons_test.go:842: (dbg) Run:  kubectl --context addons-847800 delete -n volcano-system job volcano-admission-init
addons_test.go:848: (dbg) Run:  kubectl --context addons-847800 create -f testdata\vcjob.yaml
addons_test.go:856: (dbg) Run:  kubectl --context addons-847800 get vcjob -n my-volcano
addons_test.go:874: (dbg) TestAddons/serial/Volcano: waiting 3m0s for pods matching "volcano.sh/job-name=test-job" in namespace "my-volcano" ...
helpers_test.go:344: "test-job-nginx-0" [476c3b8e-5de8-46b3-9a07-2671b1278d29] Pending
helpers_test.go:344: "test-job-nginx-0" [476c3b8e-5de8-46b3-9a07-2671b1278d29] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "test-job-nginx-0" [476c3b8e-5de8-46b3-9a07-2671b1278d29] Running
addons_test.go:874: (dbg) TestAddons/serial/Volcano: volcano.sh/job-name=test-job healthy within 23.0047333s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable volcano --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable volcano --alsologtostderr -v=1: (26.1732257s)
--- PASS: TestAddons/serial/Volcano (66.05s)

                                                
                                    
----
TestAddons/serial/GCPAuth/Namespaces (0.36s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/Namespaces
addons_test.go:569: (dbg) Run:  kubectl --context addons-847800 create ns new-namespace
addons_test.go:583: (dbg) Run:  kubectl --context addons-847800 get secret gcp-auth -n new-namespace
--- PASS: TestAddons/serial/GCPAuth/Namespaces (0.36s)

                                                
                                    
----
TestAddons/serial/GCPAuth/FakeCredentials (10.63s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/FakeCredentials
addons_test.go:614: (dbg) Run:  kubectl --context addons-847800 create -f testdata\busybox.yaml
addons_test.go:621: (dbg) Run:  kubectl --context addons-847800 create sa gcp-auth-test
addons_test.go:627: (dbg) TestAddons/serial/GCPAuth/FakeCredentials: waiting 8m0s for pods matching "integration-test=busybox" in namespace "default" ...
helpers_test.go:344: "busybox" [73137216-8d49-4014-b72d-7510ecb43d10] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "busybox" [73137216-8d49-4014-b72d-7510ecb43d10] Running
addons_test.go:627: (dbg) TestAddons/serial/GCPAuth/FakeCredentials: integration-test=busybox healthy within 9.0066007s
addons_test.go:633: (dbg) Run:  kubectl --context addons-847800 exec busybox -- /bin/sh -c "printenv GOOGLE_APPLICATION_CREDENTIALS"
addons_test.go:645: (dbg) Run:  kubectl --context addons-847800 describe sa gcp-auth-test
addons_test.go:659: (dbg) Run:  kubectl --context addons-847800 exec busybox -- /bin/sh -c "cat /google-app-creds.json"
addons_test.go:683: (dbg) Run:  kubectl --context addons-847800 exec busybox -- /bin/sh -c "printenv GOOGLE_CLOUD_PROJECT"
--- PASS: TestAddons/serial/GCPAuth/FakeCredentials (10.63s)

                                                
                                    
----
TestAddons/parallel/Registry (37.62s)

                                                
                                                
=== RUN   TestAddons/parallel/Registry
=== PAUSE TestAddons/parallel/Registry

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Registry
addons_test.go:321: registry stabilized in 9.1438ms
addons_test.go:323: (dbg) TestAddons/parallel/Registry: waiting 6m0s for pods matching "actual-registry=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-6c88467877-fbts5" [a062ee41-d504-41dd-a1ba-fa7a66c2dfb1] Running
addons_test.go:323: (dbg) TestAddons/parallel/Registry: actual-registry=true healthy within 5.0051703s
addons_test.go:326: (dbg) TestAddons/parallel/Registry: waiting 10m0s for pods matching "registry-proxy=true" in namespace "kube-system" ...
helpers_test.go:344: "registry-proxy-xsjbx" [1967e3a1-19d4-411e-a2d0-c9e1e52bd5dd] Running
addons_test.go:326: (dbg) TestAddons/parallel/Registry: registry-proxy=true healthy within 5.0074175s
addons_test.go:331: (dbg) Run:  kubectl --context addons-847800 delete po -l run=registry-test --now
addons_test.go:336: (dbg) Run:  kubectl --context addons-847800 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local"
addons_test.go:336: (dbg) Done: kubectl --context addons-847800 run --rm registry-test --restart=Never --image=gcr.io/k8s-minikube/busybox -it -- sh -c "wget --spider -S http://registry.kube-system.svc.cluster.local": (7.9731292s)
addons_test.go:350: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 ip
addons_test.go:350: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 ip: (2.9801262s)
2025/04/08 17:59:03 [DEBUG] GET http://172.22.44.145:5000
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable registry --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable registry --alsologtostderr -v=1: (16.4188428s)
--- PASS: TestAddons/parallel/Registry (37.62s)

                                                
                                    
x
+
TestAddons/parallel/Ingress (67.44s)

                                                
                                                
=== RUN   TestAddons/parallel/Ingress
=== PAUSE TestAddons/parallel/Ingress

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Ingress
addons_test.go:207: (dbg) Run:  kubectl --context addons-847800 wait --for=condition=ready --namespace=ingress-nginx pod --selector=app.kubernetes.io/component=controller --timeout=90s
addons_test.go:232: (dbg) Run:  kubectl --context addons-847800 replace --force -f testdata\nginx-ingress-v1.yaml
addons_test.go:245: (dbg) Run:  kubectl --context addons-847800 replace --force -f testdata\nginx-pod-svc.yaml
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: waiting 8m0s for pods matching "run=nginx" in namespace "default" ...
helpers_test.go:344: "nginx" [72c57ee4-4f52-4506-8f17-de1753f40fc3] Pending / Ready:ContainersNotReady (containers with unready status: [nginx]) / ContainersReady:ContainersNotReady (containers with unready status: [nginx])
helpers_test.go:344: "nginx" [72c57ee4-4f52-4506-8f17-de1753f40fc3] Running
addons_test.go:250: (dbg) TestAddons/parallel/Ingress: run=nginx healthy within 14.0057357s
I0408 17:59:57.046681    7904 kapi.go:150] Service nginx in namespace default found.
addons_test.go:262: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'"
addons_test.go:262: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 ssh "curl -s http://127.0.0.1/ -H 'Host: nginx.example.com'": (9.7247657s)
addons_test.go:286: (dbg) Run:  kubectl --context addons-847800 replace --force -f testdata\ingress-dns-example-v1.yaml
addons_test.go:291: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 ip
addons_test.go:291: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 ip: (2.528408s)
addons_test.go:297: (dbg) Run:  nslookup hello-john.test 172.22.44.145
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable ingress-dns --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable ingress-dns --alsologtostderr -v=1: (16.2612107s)
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable ingress --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable ingress --alsologtostderr -v=1: (22.6953475s)
--- PASS: TestAddons/parallel/Ingress (67.44s)

                                                
                                    
x
+
TestAddons/parallel/InspektorGadget (27.42s)

                                                
                                                
=== RUN   TestAddons/parallel/InspektorGadget
=== PAUSE TestAddons/parallel/InspektorGadget

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/InspektorGadget
addons_test.go:762: (dbg) TestAddons/parallel/InspektorGadget: waiting 8m0s for pods matching "k8s-app=gadget" in namespace "gadget" ...
helpers_test.go:344: "gadget-vwm89" [33c7e26c-a260-4aa5-8407-c9163a24f9ab] Running
addons_test.go:762: (dbg) TestAddons/parallel/InspektorGadget: k8s-app=gadget healthy within 6.0126613s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable inspektor-gadget --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable inspektor-gadget --alsologtostderr -v=1: (21.4018658s)
--- PASS: TestAddons/parallel/InspektorGadget (27.42s)

                                                
                                    
x
+
TestAddons/parallel/MetricsServer (23.74s)

                                                
                                                
=== RUN   TestAddons/parallel/MetricsServer
=== PAUSE TestAddons/parallel/MetricsServer

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/MetricsServer
addons_test.go:394: metrics-server stabilized in 11.1904ms
addons_test.go:396: (dbg) TestAddons/parallel/MetricsServer: waiting 6m0s for pods matching "k8s-app=metrics-server" in namespace "kube-system" ...
helpers_test.go:344: "metrics-server-7fbb699795-k4hql" [b236da71-1d61-4548-9229-2274fffb668e] Running
addons_test.go:396: (dbg) TestAddons/parallel/MetricsServer: k8s-app=metrics-server healthy within 5.0051508s
addons_test.go:402: (dbg) Run:  kubectl --context addons-847800 top pods -n kube-system
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable metrics-server --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable metrics-server --alsologtostderr -v=1: (18.5185912s)
--- PASS: TestAddons/parallel/MetricsServer (23.74s)

                                                
                                    
x
+
TestAddons/parallel/CSI (85.26s)

                                                
                                                
=== RUN   TestAddons/parallel/CSI
=== PAUSE TestAddons/parallel/CSI

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CSI
I0408 17:59:32.992011    7904 kapi.go:75] Waiting for pod with label "kubernetes.io/minikube-addons=csi-hostpath-driver" in ns "kube-system" ...
I0408 17:59:32.996980    7904 kapi.go:86] Found 3 Pods for label selector kubernetes.io/minikube-addons=csi-hostpath-driver
I0408 17:59:32.996980    7904 kapi.go:107] duration metric: took 4.9689ms to wait for kubernetes.io/minikube-addons=csi-hostpath-driver ...
addons_test.go:488: csi-hostpath-driver pods stabilized in 4.9689ms
addons_test.go:491: (dbg) Run:  kubectl --context addons-847800 create -f testdata\csi-hostpath-driver\pvc.yaml
addons_test.go:496: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc -o jsonpath={.status.phase} -n default
addons_test.go:501: (dbg) Run:  kubectl --context addons-847800 create -f testdata\csi-hostpath-driver\pv-pod.yaml
addons_test.go:506: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod" in namespace "default" ...
helpers_test.go:344: "task-pv-pod" [898499ca-aee7-49d2-bbba-a76a17dcc450] Pending
helpers_test.go:344: "task-pv-pod" [898499ca-aee7-49d2-bbba-a76a17dcc450] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod" [898499ca-aee7-49d2-bbba-a76a17dcc450] Running
addons_test.go:506: (dbg) TestAddons/parallel/CSI: app=task-pv-pod healthy within 9.0065169s
addons_test.go:511: (dbg) Run:  kubectl --context addons-847800 create -f testdata\csi-hostpath-driver\snapshot.yaml
addons_test.go:516: (dbg) TestAddons/parallel/CSI: waiting 6m0s for volume snapshot "new-snapshot-demo" in namespace "default" ...
helpers_test.go:419: (dbg) Run:  kubectl --context addons-847800 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
helpers_test.go:427: TestAddons/parallel/CSI: WARNING: volume snapshot get for "default" "new-snapshot-demo" returned: 
helpers_test.go:419: (dbg) Run:  kubectl --context addons-847800 get volumesnapshot new-snapshot-demo -o jsonpath={.status.readyToUse} -n default
addons_test.go:521: (dbg) Run:  kubectl --context addons-847800 delete pod task-pv-pod
addons_test.go:521: (dbg) Done: kubectl --context addons-847800 delete pod task-pv-pod: (2.0828347s)
addons_test.go:527: (dbg) Run:  kubectl --context addons-847800 delete pvc hpvc
addons_test.go:533: (dbg) Run:  kubectl --context addons-847800 create -f testdata\csi-hostpath-driver\pvc-restore.yaml
addons_test.go:538: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pvc "hpvc-restore" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc hpvc-restore -o jsonpath={.status.phase} -n default
addons_test.go:543: (dbg) Run:  kubectl --context addons-847800 create -f testdata\csi-hostpath-driver\pv-pod-restore.yaml
addons_test.go:548: (dbg) TestAddons/parallel/CSI: waiting 6m0s for pods matching "app=task-pv-pod-restore" in namespace "default" ...
helpers_test.go:344: "task-pv-pod-restore" [63a79825-3b3d-4222-95b1-becc261b0c78] Pending
helpers_test.go:344: "task-pv-pod-restore" [63a79825-3b3d-4222-95b1-becc261b0c78] Pending / Ready:ContainersNotReady (containers with unready status: [task-pv-container]) / ContainersReady:ContainersNotReady (containers with unready status: [task-pv-container])
helpers_test.go:344: "task-pv-pod-restore" [63a79825-3b3d-4222-95b1-becc261b0c78] Running
addons_test.go:548: (dbg) TestAddons/parallel/CSI: app=task-pv-pod-restore healthy within 8.0079528s
addons_test.go:553: (dbg) Run:  kubectl --context addons-847800 delete pod task-pv-pod-restore
addons_test.go:553: (dbg) Done: kubectl --context addons-847800 delete pod task-pv-pod-restore: (1.6210891s)
addons_test.go:557: (dbg) Run:  kubectl --context addons-847800 delete pvc hpvc-restore
addons_test.go:561: (dbg) Run:  kubectl --context addons-847800 delete volumesnapshot new-snapshot-demo
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable volumesnapshots --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable volumesnapshots --alsologtostderr -v=1: (15.8455236s)
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable csi-hostpath-driver --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable csi-hostpath-driver --alsologtostderr -v=1: (21.4308598s)
--- PASS: TestAddons/parallel/CSI (85.26s)

                                                
                                    
x
+
TestAddons/parallel/Headlamp (50.25s)

                                                
                                                
=== RUN   TestAddons/parallel/Headlamp
=== PAUSE TestAddons/parallel/Headlamp

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Headlamp
addons_test.go:747: (dbg) Run:  out/minikube-windows-amd64.exe addons enable headlamp -p addons-847800 --alsologtostderr -v=1
addons_test.go:747: (dbg) Done: out/minikube-windows-amd64.exe addons enable headlamp -p addons-847800 --alsologtostderr -v=1: (18.6246972s)
addons_test.go:752: (dbg) TestAddons/parallel/Headlamp: waiting 8m0s for pods matching "app.kubernetes.io/name=headlamp" in namespace "headlamp" ...
helpers_test.go:344: "headlamp-5d4b5d7bd6-fhtpw" [a1095eb2-ff12-4e5f-bcf1-14b891804e5b] Pending
helpers_test.go:344: "headlamp-5d4b5d7bd6-fhtpw" [a1095eb2-ff12-4e5f-bcf1-14b891804e5b] Pending / Ready:ContainersNotReady (containers with unready status: [headlamp]) / ContainersReady:ContainersNotReady (containers with unready status: [headlamp])
helpers_test.go:344: "headlamp-5d4b5d7bd6-fhtpw" [a1095eb2-ff12-4e5f-bcf1-14b891804e5b] Running
addons_test.go:752: (dbg) TestAddons/parallel/Headlamp: app.kubernetes.io/name=headlamp healthy within 23.2444615s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable headlamp --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable headlamp --alsologtostderr -v=1: (8.3710437s)
--- PASS: TestAddons/parallel/Headlamp (50.25s)

                                                
                                    
x
+
TestAddons/parallel/CloudSpanner (22.27s)

                                                
                                                
=== RUN   TestAddons/parallel/CloudSpanner
=== PAUSE TestAddons/parallel/CloudSpanner

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/CloudSpanner
addons_test.go:779: (dbg) TestAddons/parallel/CloudSpanner: waiting 6m0s for pods matching "app=cloud-spanner-emulator" in namespace "default" ...
helpers_test.go:344: "cloud-spanner-emulator-7dc7f9b5b8-ggkwm" [a43ad358-1b77-470d-b0c0-91f93cc31a76] Running
addons_test.go:779: (dbg) TestAddons/parallel/CloudSpanner: app=cloud-spanner-emulator healthy within 6.0057236s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable cloud-spanner --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable cloud-spanner --alsologtostderr -v=1: (16.2421183s)
--- PASS: TestAddons/parallel/CloudSpanner (22.27s)

                                                
                                    
x
+
TestAddons/parallel/LocalPath (46.82s)

                                                
                                                
=== RUN   TestAddons/parallel/LocalPath
=== PAUSE TestAddons/parallel/LocalPath

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/LocalPath
addons_test.go:888: (dbg) Run:  kubectl --context addons-847800 apply -f testdata\storage-provisioner-rancher\pvc.yaml
addons_test.go:894: (dbg) Run:  kubectl --context addons-847800 apply -f testdata\storage-provisioner-rancher\pod.yaml
addons_test.go:898: (dbg) TestAddons/parallel/LocalPath: waiting 5m0s for pvc "test-pvc" in namespace "default" ...
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
helpers_test.go:394: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o jsonpath={.status.phase} -n default
addons_test.go:901: (dbg) TestAddons/parallel/LocalPath: waiting 3m0s for pods matching "run=test-local-path" in namespace "default" ...
helpers_test.go:344: "test-local-path" [5a9697cf-57d3-4513-8449-e37f4e9db339] Pending
helpers_test.go:344: "test-local-path" [5a9697cf-57d3-4513-8449-e37f4e9db339] Pending / Ready:ContainersNotReady (containers with unready status: [busybox]) / ContainersReady:ContainersNotReady (containers with unready status: [busybox])
helpers_test.go:344: "test-local-path" [5a9697cf-57d3-4513-8449-e37f4e9db339] Pending / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
helpers_test.go:344: "test-local-path" [5a9697cf-57d3-4513-8449-e37f4e9db339] Succeeded / Initialized:PodCompleted / Ready:PodCompleted / ContainersReady:PodCompleted
addons_test.go:901: (dbg) TestAddons/parallel/LocalPath: run=test-local-path healthy within 8.0059384s
addons_test.go:906: (dbg) Run:  kubectl --context addons-847800 get pvc test-pvc -o=json
addons_test.go:915: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 ssh "cat /opt/local-path-provisioner/pvc-f41bd989-2dc4-43cb-a265-e64a16d8fa44_default_test-pvc/file1"
addons_test.go:915: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 ssh "cat /opt/local-path-provisioner/pvc-f41bd989-2dc4-43cb-a265-e64a16d8fa44_default_test-pvc/file1": (10.5813938s)
addons_test.go:927: (dbg) Run:  kubectl --context addons-847800 delete pod test-local-path
addons_test.go:931: (dbg) Run:  kubectl --context addons-847800 delete pvc test-pvc
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable storage-provisioner-rancher --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable storage-provisioner-rancher --alsologtostderr -v=1: (8.2638913s)
--- PASS: TestAddons/parallel/LocalPath (46.82s)

                                                
                                    
x
+
TestAddons/parallel/NvidiaDevicePlugin (23.09s)

                                                
                                                
=== RUN   TestAddons/parallel/NvidiaDevicePlugin
=== PAUSE TestAddons/parallel/NvidiaDevicePlugin

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/NvidiaDevicePlugin
addons_test.go:964: (dbg) TestAddons/parallel/NvidiaDevicePlugin: waiting 6m0s for pods matching "name=nvidia-device-plugin-ds" in namespace "kube-system" ...
helpers_test.go:344: "nvidia-device-plugin-daemonset-4trct" [429f60c9-df63-48eb-9d4f-efb34d17310f] Running
addons_test.go:964: (dbg) TestAddons/parallel/NvidiaDevicePlugin: name=nvidia-device-plugin-ds healthy within 6.0068816s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable nvidia-device-plugin --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable nvidia-device-plugin --alsologtostderr -v=1: (17.0775732s)
--- PASS: TestAddons/parallel/NvidiaDevicePlugin (23.09s)

                                                
                                    
x
+
TestAddons/parallel/Yakd (27.94s)

                                                
                                                
=== RUN   TestAddons/parallel/Yakd
=== PAUSE TestAddons/parallel/Yakd

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Yakd
addons_test.go:986: (dbg) TestAddons/parallel/Yakd: waiting 2m0s for pods matching "app.kubernetes.io/name=yakd-dashboard" in namespace "yakd-dashboard" ...
helpers_test.go:344: "yakd-dashboard-575dd5996b-n9mjc" [7ababf21-dfba-43f9-9c1e-fe64fc9df636] Running
addons_test.go:986: (dbg) TestAddons/parallel/Yakd: app.kubernetes.io/name=yakd-dashboard healthy within 6.0087927s
addons_test.go:992: (dbg) Run:  out/minikube-windows-amd64.exe -p addons-847800 addons disable yakd --alsologtostderr -v=1
addons_test.go:992: (dbg) Done: out/minikube-windows-amd64.exe -p addons-847800 addons disable yakd --alsologtostderr -v=1: (21.9280091s)
--- PASS: TestAddons/parallel/Yakd (27.94s)

                                                
                                    
x
+
TestAddons/StoppedEnableDisable (53.78s)

                                                
                                                
=== RUN   TestAddons/StoppedEnableDisable
addons_test.go:170: (dbg) Run:  out/minikube-windows-amd64.exe stop -p addons-847800
addons_test.go:170: (dbg) Done: out/minikube-windows-amd64.exe stop -p addons-847800: (41.1614672s)
addons_test.go:174: (dbg) Run:  out/minikube-windows-amd64.exe addons enable dashboard -p addons-847800
addons_test.go:174: (dbg) Done: out/minikube-windows-amd64.exe addons enable dashboard -p addons-847800: (5.1885304s)
addons_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe addons disable dashboard -p addons-847800
addons_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe addons disable dashboard -p addons-847800: (4.8037809s)
addons_test.go:183: (dbg) Run:  out/minikube-windows-amd64.exe addons disable gvisor -p addons-847800
addons_test.go:183: (dbg) Done: out/minikube-windows-amd64.exe addons disable gvisor -p addons-847800: (2.6281285s)
--- PASS: TestAddons/StoppedEnableDisable (53.78s)

                                                
                                    
x
+
TestForceSystemdFlag (405.81s)

                                                
                                                
=== RUN   TestForceSystemdFlag
=== PAUSE TestForceSystemdFlag

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdFlag
docker_test.go:91: (dbg) Run:  out/minikube-windows-amd64.exe start -p force-systemd-flag-662500 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=hyperv
docker_test.go:91: (dbg) Done: out/minikube-windows-amd64.exe start -p force-systemd-flag-662500 --memory=2048 --force-systemd --alsologtostderr -v=5 --driver=hyperv: (5m55.470179s)
docker_test.go:110: (dbg) Run:  out/minikube-windows-amd64.exe -p force-systemd-flag-662500 ssh "docker info --format {{.CgroupDriver}}"
docker_test.go:110: (dbg) Done: out/minikube-windows-amd64.exe -p force-systemd-flag-662500 ssh "docker info --format {{.CgroupDriver}}": (10.3354283s)
helpers_test.go:175: Cleaning up "force-systemd-flag-662500" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p force-systemd-flag-662500
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p force-systemd-flag-662500: (39.997392s)
--- PASS: TestForceSystemdFlag (405.81s)

                                                
                                    
x
+
TestForceSystemdEnv (552.52s)

                                                
                                                
=== RUN   TestForceSystemdEnv
=== PAUSE TestForceSystemdEnv

                                                
                                                

                                                
                                                
=== CONT  TestForceSystemdEnv
docker_test.go:155: (dbg) Run:  out/minikube-windows-amd64.exe start -p force-systemd-env-567900 --memory=2048 --alsologtostderr -v=5 --driver=hyperv
docker_test.go:155: (dbg) Done: out/minikube-windows-amd64.exe start -p force-systemd-env-567900 --memory=2048 --alsologtostderr -v=5 --driver=hyperv: (8m13.5562028s)
docker_test.go:110: (dbg) Run:  out/minikube-windows-amd64.exe -p force-systemd-env-567900 ssh "docker info --format {{.CgroupDriver}}"
docker_test.go:110: (dbg) Done: out/minikube-windows-amd64.exe -p force-systemd-env-567900 ssh "docker info --format {{.CgroupDriver}}": (10.6287788s)
helpers_test.go:175: Cleaning up "force-systemd-env-567900" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p force-systemd-env-567900
E0408 20:46:05.491634    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p force-systemd-env-567900: (48.3344552s)
--- PASS: TestForceSystemdEnv (552.52s)

                                                
                                    
x
+
TestErrorSpam/start (17.39s)

                                                
                                                
=== RUN   TestErrorSpam/start
error_spam_test.go:216: Cleaning up 1 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run: (5.6955926s)
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run: (5.8092956s)
error_spam_test.go:182: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run
error_spam_test.go:182: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 start --dry-run: (5.8843162s)
--- PASS: TestErrorSpam/start (17.39s)

                                                
                                    
x
+
TestErrorSpam/status (37.69s)

                                                
                                                
=== RUN   TestErrorSpam/status
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status: (12.9401742s)
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status: (12.2528885s)
error_spam_test.go:182: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status
E0408 18:06:52.214393    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.221419    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.234152    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.256848    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.299518    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.382611    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.543959    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:52.865258    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:182: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 status: (12.4923138s)
--- PASS: TestErrorSpam/status (37.69s)

                                                
                                    
x
+
TestErrorSpam/pause (23.58s)

                                                
                                                
=== RUN   TestErrorSpam/pause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause
E0408 18:06:53.508066    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:54.790758    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:06:57.352774    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause: (8.0569953s)
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause
E0408 18:07:02.474945    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause: (7.7016875s)
error_spam_test.go:182: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause
E0408 18:07:12.717974    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:182: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 pause: (7.8191186s)
--- PASS: TestErrorSpam/pause (23.58s)

                                                
                                    
x
+
TestErrorSpam/unpause (23.67s)

                                                
                                                
=== RUN   TestErrorSpam/unpause
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause: (7.9138118s)
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause: (7.8447609s)
error_spam_test.go:182: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause
E0408 18:07:33.200771    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:182: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 unpause: (7.9119004s)
--- PASS: TestErrorSpam/unpause (23.67s)

                                                
                                    
x
+
TestErrorSpam/stop (63.21s)

                                                
                                                
=== RUN   TestErrorSpam/stop
error_spam_test.go:216: Cleaning up 0 logfile(s) ...
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop
E0408 18:08:14.164954    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop: (39.9872419s)
error_spam_test.go:159: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop
error_spam_test.go:159: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop: (12.2100983s)
error_spam_test.go:182: (dbg) Run:  out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop
error_spam_test.go:182: (dbg) Done: out/minikube-windows-amd64.exe -p nospam-514800 --log_dir C:\Users\jenkins.minikube3\AppData\Local\Temp\nospam-514800 stop: (11.0089832s)
--- PASS: TestErrorSpam/stop (63.21s)

                                                
                                    
x
+
TestFunctional/serial/CopySyncFile (0.05s)

                                                
                                                
=== RUN   TestFunctional/serial/CopySyncFile
functional_test.go:1872: local sync path: C:\Users\jenkins.minikube3\minikube-integration\.minikube\files\etc\test\nested\copy\7904\hosts
--- PASS: TestFunctional/serial/CopySyncFile (0.05s)

                                                
                                    
x
+
TestFunctional/serial/StartWithProxy (232.32s)

                                                
                                                
=== RUN   TestFunctional/serial/StartWithProxy
functional_test.go:2251: (dbg) Run:  out/minikube-windows-amd64.exe start -p functional-873100 --memory=4000 --apiserver-port=8441 --wait=all --driver=hyperv
E0408 18:09:36.087504    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:11:52.217069    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:12:19.931653    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
functional_test.go:2251: (dbg) Done: out/minikube-windows-amd64.exe start -p functional-873100 --memory=4000 --apiserver-port=8441 --wait=all --driver=hyperv: (3m52.3146639s)
--- PASS: TestFunctional/serial/StartWithProxy (232.32s)

                                                
                                    
x
+
TestFunctional/serial/AuditLog (0s)

                                                
                                                
=== RUN   TestFunctional/serial/AuditLog
--- PASS: TestFunctional/serial/AuditLog (0.00s)

                                                
                                    
x
+
TestFunctional/serial/SoftStart (129.05s)

                                                
                                                
=== RUN   TestFunctional/serial/SoftStart
I0408 18:12:52.670940    7904 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
functional_test.go:676: (dbg) Run:  out/minikube-windows-amd64.exe start -p functional-873100 --alsologtostderr -v=8
functional_test.go:676: (dbg) Done: out/minikube-windows-amd64.exe start -p functional-873100 --alsologtostderr -v=8: (2m9.043779s)
functional_test.go:680: soft start took 2m9.0455498s for "functional-873100" cluster.
I0408 18:15:01.716974    7904 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
--- PASS: TestFunctional/serial/SoftStart (129.05s)

                                                
                                    
x
+
TestFunctional/serial/KubeContext (0.13s)

                                                
                                                
=== RUN   TestFunctional/serial/KubeContext
functional_test.go:698: (dbg) Run:  kubectl config current-context
--- PASS: TestFunctional/serial/KubeContext (0.13s)

                                                
                                    
x
+
TestFunctional/serial/KubectlGetPods (0.24s)

                                                
                                                
=== RUN   TestFunctional/serial/KubectlGetPods
functional_test.go:713: (dbg) Run:  kubectl --context functional-873100 get po -A
--- PASS: TestFunctional/serial/KubectlGetPods (0.24s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/add_remote (26.73s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/add_remote
functional_test.go:1066: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:3.1
functional_test.go:1066: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:3.1: (8.8512833s)
functional_test.go:1066: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:3.3
functional_test.go:1066: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:3.3: (8.9906535s)
functional_test.go:1066: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:latest
functional_test.go:1066: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cache add registry.k8s.io/pause:latest: (8.8905172s)
--- PASS: TestFunctional/serial/CacheCmd/cache/add_remote (26.73s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/add_local (10.73s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/add_local
functional_test.go:1094: (dbg) Run:  docker build -t minikube-local-cache-test:functional-873100 C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalserialCacheCmdcacheadd_local306270853\001
functional_test.go:1094: (dbg) Done: docker build -t minikube-local-cache-test:functional-873100 C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalserialCacheCmdcacheadd_local306270853\001: (1.8021628s)
functional_test.go:1106: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache add minikube-local-cache-test:functional-873100
functional_test.go:1106: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cache add minikube-local-cache-test:functional-873100: (8.5119374s)
functional_test.go:1111: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache delete minikube-local-cache-test:functional-873100
functional_test.go:1100: (dbg) Run:  docker rmi minikube-local-cache-test:functional-873100
--- PASS: TestFunctional/serial/CacheCmd/cache/add_local (10.73s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/CacheDelete (0.27s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/CacheDelete
functional_test.go:1119: (dbg) Run:  out/minikube-windows-amd64.exe cache delete registry.k8s.io/pause:3.3
--- PASS: TestFunctional/serial/CacheCmd/cache/CacheDelete (0.27s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/list (0.27s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/list
functional_test.go:1127: (dbg) Run:  out/minikube-windows-amd64.exe cache list
--- PASS: TestFunctional/serial/CacheCmd/cache/list (0.27s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (9.62s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node
functional_test.go:1141: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl images
functional_test.go:1141: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl images: (9.6222151s)
--- PASS: TestFunctional/serial/CacheCmd/cache/verify_cache_inside_node (9.62s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/cache_reload (37.09s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/cache_reload
functional_test.go:1164: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh sudo docker rmi registry.k8s.io/pause:latest
functional_test.go:1164: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh sudo docker rmi registry.k8s.io/pause:latest: (9.6274295s)
functional_test.go:1170: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1170: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl inspecti registry.k8s.io/pause:latest: exit status 1 (9.5829998s)

                                                
                                                
-- stdout --
	FATA[0000] no such image "registry.k8s.io/pause:latest" present 

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 1

                                                
                                                
** /stderr **
functional_test.go:1175: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cache reload
functional_test.go:1175: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cache reload: (8.4137932s)
functional_test.go:1180: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl inspecti registry.k8s.io/pause:latest
functional_test.go:1180: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh sudo crictl inspecti registry.k8s.io/pause:latest: (9.4676306s)
--- PASS: TestFunctional/serial/CacheCmd/cache/cache_reload (37.09s)

                                                
                                    
x
+
TestFunctional/serial/CacheCmd/cache/delete (0.54s)

                                                
                                                
=== RUN   TestFunctional/serial/CacheCmd/cache/delete
functional_test.go:1189: (dbg) Run:  out/minikube-windows-amd64.exe cache delete registry.k8s.io/pause:3.1
functional_test.go:1189: (dbg) Run:  out/minikube-windows-amd64.exe cache delete registry.k8s.io/pause:latest
--- PASS: TestFunctional/serial/CacheCmd/cache/delete (0.54s)

                                                
                                    
x
+
TestFunctional/serial/MinikubeKubectlCmd (0.52s)

                                                
                                                
=== RUN   TestFunctional/serial/MinikubeKubectlCmd
functional_test.go:733: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 kubectl -- --context functional-873100 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmd (0.52s)

                                                
                                    
x
+
TestFunctional/serial/MinikubeKubectlCmdDirectly (3.01s)

                                                
                                                
=== RUN   TestFunctional/serial/MinikubeKubectlCmdDirectly
functional_test.go:758: (dbg) Run:  out\kubectl.exe --context functional-873100 get pods
--- PASS: TestFunctional/serial/MinikubeKubectlCmdDirectly (3.01s)

                                                
                                    
x
+
TestFunctional/serial/LogsCmd (168.2s)

                                                
                                                
=== RUN   TestFunctional/serial/LogsCmd
functional_test.go:1253: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs
E0408 18:26:52.223706    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
functional_test.go:1253: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs: (2m48.1991737s)
--- PASS: TestFunctional/serial/LogsCmd (168.20s)

                                                
                                    
x
+
TestFunctional/serial/LogsFileCmd (180.75s)

                                                
                                                
=== RUN   TestFunctional/serial/LogsFileCmd
functional_test.go:1267: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 logs --file C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalserialLogsFileCmd1510467260\001\logs.txt
functional_test.go:1267: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 logs --file C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalserialLogsFileCmd1510467260\001\logs.txt: (3m0.7458932s)
--- PASS: TestFunctional/serial/LogsFileCmd (180.75s)

                                                
                                    
x
+
TestFunctional/parallel/ConfigCmd (1.84s)

                                                
                                                
=== RUN   TestFunctional/parallel/ConfigCmd
=== PAUSE TestFunctional/parallel/ConfigCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/ConfigCmd
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config unset cpus
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config get cpus
functional_test.go:1216: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 config get cpus: exit status 14 (263.8053ms)

                                                
                                                
** stderr ** 
	Error: specified key could not be found in config

                                                
                                                
** /stderr **
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config set cpus 2
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config get cpus
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config unset cpus
functional_test.go:1216: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 config get cpus
functional_test.go:1216: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 config get cpus: exit status 14 (263.4131ms)

                                                
                                                
** stderr ** 
	Error: specified key could not be found in config

                                                
                                                
** /stderr **
--- PASS: TestFunctional/parallel/ConfigCmd (1.84s)

                                                
                                    
x
+
TestFunctional/parallel/AddonsCmd (0.62s)

                                                
                                                
=== RUN   TestFunctional/parallel/AddonsCmd
=== PAUSE TestFunctional/parallel/AddonsCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/AddonsCmd
functional_test.go:1707: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 addons list
functional_test.go:1719: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 addons list -o json
--- PASS: TestFunctional/parallel/AddonsCmd (0.62s)

                                                
                                    
x
+
TestFunctional/parallel/SSHCmd (19.21s)

                                                
                                                
=== RUN   TestFunctional/parallel/SSHCmd
=== PAUSE TestFunctional/parallel/SSHCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/SSHCmd
functional_test.go:1742: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "echo hello"
functional_test.go:1742: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "echo hello": (9.8514648s)
functional_test.go:1759: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "cat /etc/hostname"
functional_test.go:1759: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "cat /etc/hostname": (9.3582007s)
--- PASS: TestFunctional/parallel/SSHCmd (19.21s)

                                                
                                    
x
+
TestFunctional/parallel/CpCmd (62.45s)

                                                
                                                
=== RUN   TestFunctional/parallel/CpCmd
=== PAUSE TestFunctional/parallel/CpCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/CpCmd
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cp testdata\cp-test.txt /home/docker/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cp testdata\cp-test.txt /home/docker/cp-test.txt: (9.0471287s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /home/docker/cp-test.txt": (11.2295285s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cp functional-873100:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalparallelCpCmd1674743440\001\cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cp functional-873100:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestFunctionalparallelCpCmd1674743440\001\cp-test.txt: (11.3034524s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /home/docker/cp-test.txt": (11.1691553s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 cp testdata\cp-test.txt /tmp/does/not/exist/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 cp testdata\cp-test.txt /tmp/does/not/exist/cp-test.txt: (8.2867378s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /tmp/does/not/exist/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh -n functional-873100 "sudo cat /tmp/does/not/exist/cp-test.txt": (11.4026953s)
--- PASS: TestFunctional/parallel/CpCmd (62.45s)

                                                
                                    
x
+
TestFunctional/parallel/FileSync (9.43s)

                                                
                                                
=== RUN   TestFunctional/parallel/FileSync
=== PAUSE TestFunctional/parallel/FileSync

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/FileSync
functional_test.go:1946: Checking for existence of /etc/test/nested/copy/7904/hosts within VM
functional_test.go:1948: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/test/nested/copy/7904/hosts"
functional_test.go:1948: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/test/nested/copy/7904/hosts": (9.4246835s)
functional_test.go:1953: file sync test content: Test file for checking file sync process
--- PASS: TestFunctional/parallel/FileSync (9.43s)

                                                
                                    
x
+
TestFunctional/parallel/CertSync (57.94s)

                                                
                                                
=== RUN   TestFunctional/parallel/CertSync
=== PAUSE TestFunctional/parallel/CertSync

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/CertSync
functional_test.go:1989: Checking for existence of /etc/ssl/certs/7904.pem within VM
functional_test.go:1990: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/7904.pem"
functional_test.go:1990: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/7904.pem": (9.7926803s)
functional_test.go:1989: Checking for existence of /usr/share/ca-certificates/7904.pem within VM
functional_test.go:1990: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /usr/share/ca-certificates/7904.pem"
functional_test.go:1990: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /usr/share/ca-certificates/7904.pem": (9.7959779s)
functional_test.go:1989: Checking for existence of /etc/ssl/certs/51391683.0 within VM
functional_test.go:1990: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/51391683.0"
functional_test.go:1990: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/51391683.0": (9.4207115s)
functional_test.go:2016: Checking for existence of /etc/ssl/certs/79042.pem within VM
functional_test.go:2017: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/79042.pem"
functional_test.go:2017: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/79042.pem": (9.4952144s)
functional_test.go:2016: Checking for existence of /usr/share/ca-certificates/79042.pem within VM
functional_test.go:2017: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /usr/share/ca-certificates/79042.pem"
functional_test.go:2017: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /usr/share/ca-certificates/79042.pem": (9.4522751s)
functional_test.go:2016: Checking for existence of /etc/ssl/certs/3ec20f2e.0 within VM
functional_test.go:2017: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0"
functional_test.go:2017: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo cat /etc/ssl/certs/3ec20f2e.0": (9.9771324s)
--- PASS: TestFunctional/parallel/CertSync (57.94s)

                                                
                                    
x
+
TestFunctional/parallel/NonActiveRuntimeDisabled (9.6s)

                                                
                                                
=== RUN   TestFunctional/parallel/NonActiveRuntimeDisabled
=== PAUSE TestFunctional/parallel/NonActiveRuntimeDisabled

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/NonActiveRuntimeDisabled
functional_test.go:2044: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo systemctl is-active crio"
functional_test.go:2044: (dbg) Non-zero exit: out/minikube-windows-amd64.exe -p functional-873100 ssh "sudo systemctl is-active crio": exit status 1 (9.6042953s)

                                                
                                                
-- stdout --
	inactive

                                                
                                                
-- /stdout --
** stderr ** 
	ssh: Process exited with status 3

                                                
                                                
** /stderr **
--- PASS: TestFunctional/parallel/NonActiveRuntimeDisabled (9.60s)

                                                
                                    
x
+
TestFunctional/parallel/License (1.61s)

                                                
                                                
=== RUN   TestFunctional/parallel/License
=== PAUSE TestFunctional/parallel/License

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/License
functional_test.go:2305: (dbg) Run:  out/minikube-windows-amd64.exe license
functional_test.go:2305: (dbg) Done: out/minikube-windows-amd64.exe license: (1.5957536s)
--- PASS: TestFunctional/parallel/License (1.61s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_not_create (15.17s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_not_create
functional_test.go:1287: (dbg) Run:  out/minikube-windows-amd64.exe profile lis
functional_test.go:1292: (dbg) Run:  out/minikube-windows-amd64.exe profile list --output json
functional_test.go:1292: (dbg) Done: out/minikube-windows-amd64.exe profile list --output json: (14.7989955s)
--- PASS: TestFunctional/parallel/ProfileCmd/profile_not_create (15.17s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_list (14.87s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_list
functional_test.go:1327: (dbg) Run:  out/minikube-windows-amd64.exe profile list
functional_test.go:1327: (dbg) Done: out/minikube-windows-amd64.exe profile list: (14.5838702s)
functional_test.go:1332: Took "14.5839814s" to run "out/minikube-windows-amd64.exe profile list"
functional_test.go:1341: (dbg) Run:  out/minikube-windows-amd64.exe profile list -l
functional_test.go:1346: Took "285.6504ms" to run "out/minikube-windows-amd64.exe profile list -l"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_list (14.87s)

                                                
                                    
x
+
TestFunctional/parallel/ProfileCmd/profile_json_output (14.35s)

                                                
                                                
=== RUN   TestFunctional/parallel/ProfileCmd/profile_json_output
functional_test.go:1378: (dbg) Run:  out/minikube-windows-amd64.exe profile list -o json
functional_test.go:1378: (dbg) Done: out/minikube-windows-amd64.exe profile list -o json: (14.0967389s)
functional_test.go:1383: Took "14.0972293s" to run "out/minikube-windows-amd64.exe profile list -o json"
functional_test.go:1391: (dbg) Run:  out/minikube-windows-amd64.exe profile list -o json --light
functional_test.go:1396: Took "253.9595ms" to run "out/minikube-windows-amd64.exe profile list -o json --light"
--- PASS: TestFunctional/parallel/ProfileCmd/profile_json_output (14.35s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.01s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/StartTunnel
functional_test_tunnel_test.go:129: (dbg) daemon: [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr]
--- PASS: TestFunctional/parallel/TunnelCmd/serial/StartTunnel (0.01s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel
functional_test_tunnel_test.go:434: (dbg) stopping [out/minikube-windows-amd64.exe -p functional-873100 tunnel --alsologtostderr] ...
helpers_test.go:508: unable to kill pid 10340: OpenProcess: The parameter is incorrect.
helpers_test.go:502: unable to terminate pid 12688: Access is denied.
functional_test_tunnel_test.go:437: failed to stop process: exit status 103
--- PASS: TestFunctional/parallel/TunnelCmd/serial/DeleteTunnel (0.11s)

                                                
                                    
x
+
TestFunctional/parallel/Version/short (0.27s)

                                                
                                                
=== RUN   TestFunctional/parallel/Version/short
=== PAUSE TestFunctional/parallel/Version/short

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/Version/short
functional_test.go:2273: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 version --short
--- PASS: TestFunctional/parallel/Version/short (0.27s)

                                                
                                    
x
+
TestFunctional/parallel/Version/components (8.14s)

                                                
                                                
=== RUN   TestFunctional/parallel/Version/components
=== PAUSE TestFunctional/parallel/Version/components

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/Version/components
functional_test.go:2287: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 version -o=json --components
functional_test.go:2287: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 version -o=json --components: (8.1406566s)
--- PASS: TestFunctional/parallel/Version/components (8.14s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/Setup (2.04s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/Setup
functional_test.go:359: (dbg) Run:  docker pull kicbase/echo-server:1.0
functional_test.go:359: (dbg) Done: docker pull kicbase/echo-server:1.0: (1.9153635s)
functional_test.go:364: (dbg) Run:  docker tag kicbase/echo-server:1.0 kicbase/echo-server:functional-873100
--- PASS: TestFunctional/parallel/ImageCommands/Setup (2.04s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageRemove (120.43s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageRemove
functional_test.go:409: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image rm kicbase/echo-server:functional-873100 --alsologtostderr
functional_test.go:409: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image rm kicbase/echo-server:functional-873100 --alsologtostderr: (1m0.1231587s)
functional_test.go:468: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image ls
functional_test.go:468: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image ls: (1m0.3107405s)
--- PASS: TestFunctional/parallel/ImageCommands/ImageRemove (120.43s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_changes (2.57s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_changes
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_changes

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_changes
functional_test.go:2136: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2
functional_test.go:2136: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2: (2.5695462s)
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_changes (2.57s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (2.59s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster
functional_test.go:2136: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2
functional_test.go:2136: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2: (2.5905695s)
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_minikube_cluster (2.59s)

                                                
                                    
x
+
TestFunctional/parallel/UpdateContextCmd/no_clusters (2.65s)

                                                
                                                
=== RUN   TestFunctional/parallel/UpdateContextCmd/no_clusters
=== PAUSE TestFunctional/parallel/UpdateContextCmd/no_clusters

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/UpdateContextCmd/no_clusters
functional_test.go:2136: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2
functional_test.go:2136: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 update-context --alsologtostderr -v=2: (2.647319s)
--- PASS: TestFunctional/parallel/UpdateContextCmd/no_clusters (2.65s)

                                                
                                    
x
+
TestFunctional/parallel/ImageCommands/ImageSaveDaemon (60.23s)

                                                
                                                
=== RUN   TestFunctional/parallel/ImageCommands/ImageSaveDaemon
functional_test.go:436: (dbg) Run:  docker rmi kicbase/echo-server:functional-873100
functional_test.go:441: (dbg) Run:  out/minikube-windows-amd64.exe -p functional-873100 image save --daemon kicbase/echo-server:functional-873100 --alsologtostderr
E0408 18:41:52.229564    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
functional_test.go:441: (dbg) Done: out/minikube-windows-amd64.exe -p functional-873100 image save --daemon kicbase/echo-server:functional-873100 --alsologtostderr: (1m0.0187996s)
functional_test.go:449: (dbg) Run:  docker image inspect kicbase/echo-server:functional-873100
--- PASS: TestFunctional/parallel/ImageCommands/ImageSaveDaemon (60.23s)

                                                
                                    
x
+
TestFunctional/delete_echo-server_images (0.2s)

                                                
                                                
=== RUN   TestFunctional/delete_echo-server_images
functional_test.go:207: (dbg) Run:  docker rmi -f kicbase/echo-server:1.0
functional_test.go:207: (dbg) Run:  docker rmi -f kicbase/echo-server:functional-873100
--- PASS: TestFunctional/delete_echo-server_images (0.20s)

                                                
                                    
x
+
TestFunctional/delete_my-image_image (0.1s)

                                                
                                                
=== RUN   TestFunctional/delete_my-image_image
functional_test.go:215: (dbg) Run:  docker rmi -f localhost/my-image:functional-873100
--- PASS: TestFunctional/delete_my-image_image (0.10s)

                                                
                                    
x
+
TestFunctional/delete_minikube_cached_images (0.09s)

                                                
                                                
=== RUN   TestFunctional/delete_minikube_cached_images
functional_test.go:223: (dbg) Run:  docker rmi -f minikube-local-cache-test:functional-873100
--- PASS: TestFunctional/delete_minikube_cached_images (0.09s)

                                                
                                    
x
+
TestMultiControlPlane/serial/StartCluster (727.03s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/StartCluster
ha_test.go:101: (dbg) Run:  out/minikube-windows-amd64.exe start -p ha-089400 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=hyperv
E0408 18:51:05.438202    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.445353    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.456977    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.480290    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.521880    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.603549    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:05.765323    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:06.088070    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:06.730702    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:08.012516    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:10.575540    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:15.698031    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:25.940635    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:46.423813    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:51:52.234673    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:52:27.386060    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:53:49.309744    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:56:05.440083    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:56:33.153703    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:56:35.319663    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 18:56:52.236994    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
ha_test.go:101: (dbg) Done: out/minikube-windows-amd64.exe start -p ha-089400 --wait=true --memory=2200 --ha -v=7 --alsologtostderr --driver=hyperv: (11m29.5136764s)
ha_test.go:107: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr
ha_test.go:107: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr: (37.5143681s)
--- PASS: TestMultiControlPlane/serial/StartCluster (727.03s)

                                                
                                    
x
+
TestMultiControlPlane/serial/DeployApp (14.41s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/DeployApp
ha_test.go:128: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- apply -f ./testdata/ha/ha-pod-dns-test.yaml
ha_test.go:133: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- rollout status deployment/busybox
ha_test.go:133: (dbg) Done: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- rollout status deployment/busybox: (5.1738835s)
ha_test.go:140: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- get pods -o jsonpath='{.items[*].status.podIP}'
ha_test.go:163: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- get pods -o jsonpath='{.items[*].metadata.name}'
ha_test.go:171: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- nslookup kubernetes.io
ha_test.go:171: (dbg) Done: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- nslookup kubernetes.io: (2.1069088s)
ha_test.go:171: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- nslookup kubernetes.io
ha_test.go:171: (dbg) Done: out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- nslookup kubernetes.io: (1.6102359s)
ha_test.go:171: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- nslookup kubernetes.io
ha_test.go:181: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- nslookup kubernetes.default
ha_test.go:181: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- nslookup kubernetes.default
ha_test.go:189: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-d76nt -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-lwn24 -- nslookup kubernetes.default.svc.cluster.local
ha_test.go:189: (dbg) Run:  out/minikube-windows-amd64.exe kubectl -p ha-089400 -- exec busybox-58667487b6-snc97 -- nslookup kubernetes.default.svc.cluster.local
--- PASS: TestMultiControlPlane/serial/DeployApp (14.41s)

                                                
                                    
x
+
TestMultiControlPlane/serial/AddWorkerNode (272.26s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/AddWorkerNode
ha_test.go:228: (dbg) Run:  out/minikube-windows-amd64.exe node add -p ha-089400 -v=7 --alsologtostderr
E0408 19:01:05.442350    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:01:52.239514    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
ha_test.go:228: (dbg) Done: out/minikube-windows-amd64.exe node add -p ha-089400 -v=7 --alsologtostderr: (3m42.1200992s)
ha_test.go:234: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr
ha_test.go:234: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 status -v=7 --alsologtostderr: (50.1432418s)
--- PASS: TestMultiControlPlane/serial/AddWorkerNode (272.26s)

                                                
                                    
x
+
TestMultiControlPlane/serial/NodeLabels (0.19s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/NodeLabels
ha_test.go:255: (dbg) Run:  kubectl --context ha-089400 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiControlPlane/serial/NodeLabels (0.19s)

                                                
                                    
x
+
TestMultiControlPlane/serial/HAppyAfterClusterStart (50.05s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/HAppyAfterClusterStart
ha_test.go:281: (dbg) Run:  out/minikube-windows-amd64.exe profile list --output json
E0408 19:06:05.448364    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
ha_test.go:281: (dbg) Done: out/minikube-windows-amd64.exe profile list --output json: (50.0501637s)
--- PASS: TestMultiControlPlane/serial/HAppyAfterClusterStart (50.05s)

                                                
                                    
x
+
TestMultiControlPlane/serial/CopyFile (653.38s)

                                                
                                                
=== RUN   TestMultiControlPlane/serial/CopyFile
ha_test.go:328: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 status --output json -v=7 --alsologtostderr
E0408 19:06:52.244298    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
ha_test.go:328: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 status --output json -v=7 --alsologtostderr: (50.4159103s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400:/home/docker/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400:/home/docker/cp-test.txt: (10.1396365s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt"
E0408 19:07:28.521602    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt": (10.178104s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400.txt: (10.1707121s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt": (10.184889s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400_ha-089400-m02.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400_ha-089400-m02.txt: (17.6235903s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt": (10.1351703s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m02.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m02.txt": (9.9732479s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400_ha-089400-m03.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400_ha-089400-m03.txt: (17.5525398s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt": (10.0970182s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m03.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m03.txt": (10.0243181s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400_ha-089400-m04.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400_ha-089400-m04.txt: (17.2040835s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test.txt": (9.8239457s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m04.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400_ha-089400-m04.txt": (9.8972996s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m02:/home/docker/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m02:/home/docker/cp-test.txt: (9.8699562s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt": (9.7528485s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m02.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m02.txt: (9.7548136s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt": (9.8031881s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m02_ha-089400.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m02_ha-089400.txt: (17.3397924s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt": (9.8905329s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400.txt": (9.8419571s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400-m02_ha-089400-m03.txt
E0408 19:11:05.447359    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400-m02_ha-089400-m03.txt: (17.02234s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt": (10.1464866s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400-m03.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400-m03.txt": (9.7774195s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400-m02_ha-089400-m04.txt
E0408 19:11:52.244363    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m02:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400-m02_ha-089400-m04.txt: (17.0821697s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test.txt": (9.9053234s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400-m04.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400-m02_ha-089400-m04.txt": (9.8505537s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m03:/home/docker/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m03:/home/docker/cp-test.txt: (9.8457942s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt": (9.8554518s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m03.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m03.txt: (9.8807542s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt": (9.8593109s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m03_ha-089400.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m03_ha-089400.txt: (17.178443s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt"
E0408 19:13:15.330347    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt": (9.7978866s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400.txt": (9.8234461s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt: (17.0907375s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt": (9.8105226s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400-m02.txt": (9.7731352s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m03:/home/docker/cp-test.txt ha-089400-m04:/home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt: (17.1288812s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test.txt": (9.8824997s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test_ha-089400-m03_ha-089400-m04.txt": (9.9204048s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m04:/home/docker/cp-test.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp testdata\cp-test.txt ha-089400-m04:/home/docker/cp-test.txt: (9.8790425s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt": (9.7829986s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m04.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt C:\Users\jenkins.minikube3\AppData\Local\Temp\TestMultiControlPlaneserialCopyFile2929288862\001\cp-test_ha-089400-m04.txt: (9.8607584s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt": (9.8074345s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m04_ha-089400.txt
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400:/home/docker/cp-test_ha-089400-m04_ha-089400.txt: (17.3148723s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt": (9.9810627s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400.txt": (9.9234328s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt
E0408 19:16:05.450029    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400-m02:/home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt: (17.153143s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt": (9.7750916s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m02 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400-m02.txt": (9.7815456s)
helpers_test.go:556: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt
E0408 19:16:52.246957    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:556: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 cp ha-089400-m04:/home/docker/cp-test.txt ha-089400-m03:/home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt: (17.1198498s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m04 "sudo cat /home/docker/cp-test.txt": (9.839926s)
helpers_test.go:534: (dbg) Run:  out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt"
helpers_test.go:534: (dbg) Done: out/minikube-windows-amd64.exe -p ha-089400 ssh -n ha-089400-m03 "sudo cat /home/docker/cp-test_ha-089400-m04_ha-089400-m03.txt": (9.8164509s)
--- PASS: TestMultiControlPlane/serial/CopyFile (653.38s)

                                                
                                    
x
+
TestImageBuild/serial/Setup (200.14s)

                                                
                                                
=== RUN   TestImageBuild/serial/Setup
image_test.go:69: (dbg) Run:  out/minikube-windows-amd64.exe start -p image-414600 --driver=hyperv
E0408 19:21:05.452017    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:21:52.248841    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
image_test.go:69: (dbg) Done: out/minikube-windows-amd64.exe start -p image-414600 --driver=hyperv: (3m20.1425584s)
--- PASS: TestImageBuild/serial/Setup (200.14s)

                                                
                                    
x
+
TestImageBuild/serial/NormalBuild (10.75s)

                                                
                                                
=== RUN   TestImageBuild/serial/NormalBuild
image_test.go:78: (dbg) Run:  out/minikube-windows-amd64.exe image build -t aaa:latest ./testdata/image-build/test-normal -p image-414600
E0408 19:24:08.531513    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
image_test.go:78: (dbg) Done: out/minikube-windows-amd64.exe image build -t aaa:latest ./testdata/image-build/test-normal -p image-414600: (10.7514906s)
--- PASS: TestImageBuild/serial/NormalBuild (10.75s)

                                                
                                    
x
+
TestImageBuild/serial/BuildWithBuildArg (8.97s)

                                                
                                                
=== RUN   TestImageBuild/serial/BuildWithBuildArg
image_test.go:99: (dbg) Run:  out/minikube-windows-amd64.exe image build -t aaa:latest --build-opt=build-arg=ENV_A=test_env_str --build-opt=no-cache ./testdata/image-build/test-arg -p image-414600
image_test.go:99: (dbg) Done: out/minikube-windows-amd64.exe image build -t aaa:latest --build-opt=build-arg=ENV_A=test_env_str --build-opt=no-cache ./testdata/image-build/test-arg -p image-414600: (8.9657394s)
--- PASS: TestImageBuild/serial/BuildWithBuildArg (8.97s)

                                                
                                    
x
+
TestImageBuild/serial/BuildWithDockerIgnore (8.24s)

                                                
                                                
=== RUN   TestImageBuild/serial/BuildWithDockerIgnore
image_test.go:133: (dbg) Run:  out/minikube-windows-amd64.exe image build -t aaa:latest ./testdata/image-build/test-normal --build-opt=no-cache -p image-414600
image_test.go:133: (dbg) Done: out/minikube-windows-amd64.exe image build -t aaa:latest ./testdata/image-build/test-normal --build-opt=no-cache -p image-414600: (8.2414679s)
--- PASS: TestImageBuild/serial/BuildWithDockerIgnore (8.24s)

                                                
                                    
x
+
TestImageBuild/serial/BuildWithSpecifiedDockerfile (8.3s)

                                                
                                                
=== RUN   TestImageBuild/serial/BuildWithSpecifiedDockerfile
image_test.go:88: (dbg) Run:  out/minikube-windows-amd64.exe image build -t aaa:latest -f inner/Dockerfile ./testdata/image-build/test-f -p image-414600
image_test.go:88: (dbg) Done: out/minikube-windows-amd64.exe image build -t aaa:latest -f inner/Dockerfile ./testdata/image-build/test-f -p image-414600: (8.3029265s)
--- PASS: TestImageBuild/serial/BuildWithSpecifiedDockerfile (8.30s)

                                                
                                    
x
+
TestJSONOutput/start/Command (233.95s)

                                                
                                                
=== RUN   TestJSONOutput/start/Command
json_output_test.go:63: (dbg) Run:  out/minikube-windows-amd64.exe start -p json-output-059800 --output=json --user=testUser --memory=2200 --wait=true --driver=hyperv
E0408 19:26:05.454942    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:26:52.250614    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
json_output_test.go:63: (dbg) Done: out/minikube-windows-amd64.exe start -p json-output-059800 --output=json --user=testUser --memory=2200 --wait=true --driver=hyperv: (3m53.9509938s)
--- PASS: TestJSONOutput/start/Command (233.95s)

                                                
                                    
x
+
TestJSONOutput/start/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/Audit
--- PASS: TestJSONOutput/start/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/start/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/start/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/start/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/start/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/start/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/start/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/start/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/Command (8.19s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-windows-amd64.exe pause -p json-output-059800 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-windows-amd64.exe pause -p json-output-059800 --output=json --user=testUser: (8.1940124s)
--- PASS: TestJSONOutput/pause/Command (8.19s)

                                                
                                    
x
+
TestJSONOutput/pause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/Audit
--- PASS: TestJSONOutput/pause/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/pause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/pause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/pause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/pause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/Command (8.05s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Command
json_output_test.go:63: (dbg) Run:  out/minikube-windows-amd64.exe unpause -p json-output-059800 --output=json --user=testUser
json_output_test.go:63: (dbg) Done: out/minikube-windows-amd64.exe unpause -p json-output-059800 --output=json --user=testUser: (8.0544296s)
--- PASS: TestJSONOutput/unpause/Command (8.05s)

                                                
                                    
x
+
TestJSONOutput/unpause/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/Audit
--- PASS: TestJSONOutput/unpause/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/unpause/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/unpause/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/unpause/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/Command (35.25s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Command
json_output_test.go:63: (dbg) Run:  out/minikube-windows-amd64.exe stop -p json-output-059800 --output=json --user=testUser
E0408 19:29:55.339748    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
json_output_test.go:63: (dbg) Done: out/minikube-windows-amd64.exe stop -p json-output-059800 --output=json --user=testUser: (35.2457212s)
--- PASS: TestJSONOutput/stop/Command (35.25s)

                                                
                                    
x
+
TestJSONOutput/stop/Audit (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/Audit
--- PASS: TestJSONOutput/stop/Audit (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/parallel/DistinctCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/DistinctCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/DistinctCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/DistinctCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/DistinctCurrentSteps (0.00s)

                                                
                                    
x
+
TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0s)

                                                
                                                
=== RUN   TestJSONOutput/stop/parallel/IncreasingCurrentSteps
=== PAUSE TestJSONOutput/stop/parallel/IncreasingCurrentSteps

                                                
                                                

                                                
                                                
=== CONT  TestJSONOutput/stop/parallel/IncreasingCurrentSteps
--- PASS: TestJSONOutput/stop/parallel/IncreasingCurrentSteps (0.00s)

                                                
                                    
x
+
TestErrorJSONOutput (1s)

                                                
                                                
=== RUN   TestErrorJSONOutput
json_output_test.go:160: (dbg) Run:  out/minikube-windows-amd64.exe start -p json-output-error-393600 --memory=2200 --output=json --wait=true --driver=fail
json_output_test.go:160: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p json-output-error-393600 --memory=2200 --output=json --wait=true --driver=fail: exit status 56 (304.5121ms)

                                                
                                                
-- stdout --
	{"specversion":"1.0","id":"1cef968d-f124-4cc4-8747-72110244d917","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.step","datacontenttype":"application/json","data":{"currentstep":"0","message":"[json-output-error-393600] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679","name":"Initial Minikube Setup","totalsteps":"19"}}
	{"specversion":"1.0","id":"5ddb6b1a-b91d-4d7f-8bf3-2d3a3abc29ba","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"KUBECONFIG=C:\\Users\\jenkins.minikube3\\minikube-integration\\kubeconfig"}}
	{"specversion":"1.0","id":"49e4d392-3228-4de9-9b5b-7606404dea21","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_FORCE_SYSTEMD="}}
	{"specversion":"1.0","id":"3233ea66-83c9-4b31-b761-2509f58ddec3","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_HOME=C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube"}}
	{"specversion":"1.0","id":"59c7fde9-2668-4ef1-bf7f-85b2a49005f5","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_LOCATION=20604"}}
	{"specversion":"1.0","id":"1ccdbbfa-cf0f-4169-adf9-496dd7cf0d30","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.info","datacontenttype":"application/json","data":{"message":"MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true"}}
	{"specversion":"1.0","id":"a8c050d0-9acb-409b-8c5c-d0afdc9c25a1","source":"https://minikube.sigs.k8s.io/","type":"io.k8s.sigs.minikube.error","datacontenttype":"application/json","data":{"advice":"","exitcode":"56","issues":"","message":"The driver 'fail' is not supported on windows/amd64","name":"DRV_UNSUPPORTED_OS","url":""}}

                                                
                                                
-- /stdout --
helpers_test.go:175: Cleaning up "json-output-error-393600" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p json-output-error-393600
--- PASS: TestErrorJSONOutput (1.00s)

                                                
                                    
x
+
TestMainNoArgs (0.24s)

                                                
                                                
=== RUN   TestMainNoArgs
main_test.go:68: (dbg) Run:  out/minikube-windows-amd64.exe
--- PASS: TestMainNoArgs (0.24s)

                                                
                                    
x
+
TestMinikubeProfile (538.04s)

                                                
                                                
=== RUN   TestMinikubeProfile
minikube_profile_test.go:44: (dbg) Run:  out/minikube-windows-amd64.exe start -p first-128400 --driver=hyperv
E0408 19:31:05.456930    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:31:52.253890    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
minikube_profile_test.go:44: (dbg) Done: out/minikube-windows-amd64.exe start -p first-128400 --driver=hyperv: (3m19.3615113s)
minikube_profile_test.go:44: (dbg) Run:  out/minikube-windows-amd64.exe start -p second-128400 --driver=hyperv
E0408 19:36:05.460109    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:36:52.255621    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
minikube_profile_test.go:44: (dbg) Done: out/minikube-windows-amd64.exe start -p second-128400 --driver=hyperv: (3m22.2956916s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-windows-amd64.exe profile first-128400
minikube_profile_test.go:55: (dbg) Run:  out/minikube-windows-amd64.exe profile list -ojson
minikube_profile_test.go:55: (dbg) Done: out/minikube-windows-amd64.exe profile list -ojson: (24.5998806s)
minikube_profile_test.go:51: (dbg) Run:  out/minikube-windows-amd64.exe profile second-128400
minikube_profile_test.go:55: (dbg) Run:  out/minikube-windows-amd64.exe profile list -ojson
minikube_profile_test.go:55: (dbg) Done: out/minikube-windows-amd64.exe profile list -ojson: (24.7565584s)
helpers_test.go:175: Cleaning up "second-128400" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p second-128400
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p second-128400: (40.5119455s)
helpers_test.go:175: Cleaning up "first-128400" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p first-128400
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p first-128400: (45.8203666s)
--- PASS: TestMinikubeProfile (538.04s)

                                                
                                    
x
+
TestMountStart/serial/StartWithMountFirst (158.67s)

                                                
                                                
=== RUN   TestMountStart/serial/StartWithMountFirst
mount_start_test.go:98: (dbg) Run:  out/minikube-windows-amd64.exe start -p mount-start-1-514700 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=hyperv
E0408 19:40:48.542390    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:41:05.461425    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:41:52.257701    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
mount_start_test.go:98: (dbg) Done: out/minikube-windows-amd64.exe start -p mount-start-1-514700 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46464 --mount-uid 0 --no-kubernetes --driver=hyperv: (2m37.6726451s)
--- PASS: TestMountStart/serial/StartWithMountFirst (158.67s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountFirst (9.84s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountFirst
mount_start_test.go:114: (dbg) Run:  out/minikube-windows-amd64.exe -p mount-start-1-514700 ssh -- ls /minikube-host
mount_start_test.go:114: (dbg) Done: out/minikube-windows-amd64.exe -p mount-start-1-514700 ssh -- ls /minikube-host: (9.8436895s)
--- PASS: TestMountStart/serial/VerifyMountFirst (9.84s)

                                                
                                    
x
+
TestMountStart/serial/StartWithMountSecond (158.53s)

                                                
                                                
=== RUN   TestMountStart/serial/StartWithMountSecond
mount_start_test.go:98: (dbg) Run:  out/minikube-windows-amd64.exe start -p mount-start-2-514700 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=hyperv
mount_start_test.go:98: (dbg) Done: out/minikube-windows-amd64.exe start -p mount-start-2-514700 --memory=2048 --mount --mount-gid 0 --mount-msize 6543 --mount-port 46465 --mount-uid 0 --no-kubernetes --driver=hyperv: (2m37.5298523s)
--- PASS: TestMountStart/serial/StartWithMountSecond (158.53s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountSecond (9.89s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountSecond
mount_start_test.go:114: (dbg) Run:  out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host
mount_start_test.go:114: (dbg) Done: out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host: (9.8853004s)
--- PASS: TestMountStart/serial/VerifyMountSecond (9.89s)

                                                
                                    
x
+
TestMountStart/serial/DeleteFirst (28.08s)

                                                
                                                
=== RUN   TestMountStart/serial/DeleteFirst
pause_test.go:132: (dbg) Run:  out/minikube-windows-amd64.exe delete -p mount-start-1-514700 --alsologtostderr -v=5
pause_test.go:132: (dbg) Done: out/minikube-windows-amd64.exe delete -p mount-start-1-514700 --alsologtostderr -v=5: (28.0757491s)
--- PASS: TestMountStart/serial/DeleteFirst (28.08s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountPostDelete (10.32s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountPostDelete
mount_start_test.go:114: (dbg) Run:  out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host
mount_start_test.go:114: (dbg) Done: out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host: (10.3196541s)
--- PASS: TestMountStart/serial/VerifyMountPostDelete (10.32s)

                                                
                                    
x
+
TestMountStart/serial/Stop (32.38s)

                                                
                                                
=== RUN   TestMountStart/serial/Stop
mount_start_test.go:155: (dbg) Run:  out/minikube-windows-amd64.exe stop -p mount-start-2-514700
E0408 19:46:05.464273    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
mount_start_test.go:155: (dbg) Done: out/minikube-windows-amd64.exe stop -p mount-start-2-514700: (32.3822617s)
--- PASS: TestMountStart/serial/Stop (32.38s)

                                                
                                    
x
+
TestMountStart/serial/RestartStopped (129.31s)

                                                
                                                
=== RUN   TestMountStart/serial/RestartStopped
mount_start_test.go:166: (dbg) Run:  out/minikube-windows-amd64.exe start -p mount-start-2-514700
E0408 19:46:35.351202    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 19:46:52.260548    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
mount_start_test.go:166: (dbg) Done: out/minikube-windows-amd64.exe start -p mount-start-2-514700: (2m8.3130381s)
--- PASS: TestMountStart/serial/RestartStopped (129.31s)

                                                
                                    
x
+
TestMountStart/serial/VerifyMountPostStop (10.36s)

                                                
                                                
=== RUN   TestMountStart/serial/VerifyMountPostStop
mount_start_test.go:114: (dbg) Run:  out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host
mount_start_test.go:114: (dbg) Done: out/minikube-windows-amd64.exe -p mount-start-2-514700 ssh -- ls /minikube-host: (10.3589077s)
--- PASS: TestMountStart/serial/VerifyMountPostStop (10.36s)

                                                
                                    
x
+
TestMultiNode/serial/MultiNodeLabels (0.18s)

                                                
                                                
=== RUN   TestMultiNode/serial/MultiNodeLabels
multinode_test.go:221: (dbg) Run:  kubectl --context multinode-095200 get nodes -o "jsonpath=[{range .items[*]}{.metadata.labels},{end}]"
--- PASS: TestMultiNode/serial/MultiNodeLabels (0.18s)

                                                
                                    
x
+
TestMultiNode/serial/ProfileList (36.55s)

                                                
                                                
=== RUN   TestMultiNode/serial/ProfileList
multinode_test.go:143: (dbg) Run:  out/minikube-windows-amd64.exe profile list --output json
multinode_test.go:143: (dbg) Done: out/minikube-windows-amd64.exe profile list --output json: (36.5480772s)
--- PASS: TestMultiNode/serial/ProfileList (36.55s)

                                                
                                    
x
+
TestPreload (565.77s)

                                                
                                                
=== RUN   TestPreload
preload_test.go:44: (dbg) Run:  out/minikube-windows-amd64.exe start -p test-preload-800000 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=hyperv --kubernetes-version=v1.24.4
E0408 20:21:52.276822    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:26:05.482389    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
preload_test.go:44: (dbg) Done: out/minikube-windows-amd64.exe start -p test-preload-800000 --memory=2200 --alsologtostderr --wait=true --preload=false --driver=hyperv --kubernetes-version=v1.24.4: (4m32.4974945s)
preload_test.go:52: (dbg) Run:  out/minikube-windows-amd64.exe -p test-preload-800000 image pull gcr.io/k8s-minikube/busybox
preload_test.go:52: (dbg) Done: out/minikube-windows-amd64.exe -p test-preload-800000 image pull gcr.io/k8s-minikube/busybox: (8.9471199s)
preload_test.go:58: (dbg) Run:  out/minikube-windows-amd64.exe stop -p test-preload-800000
E0408 20:26:52.280392    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
preload_test.go:58: (dbg) Done: out/minikube-windows-amd64.exe stop -p test-preload-800000: (40.0327314s)
preload_test.go:66: (dbg) Run:  out/minikube-windows-amd64.exe start -p test-preload-800000 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=hyperv
preload_test.go:66: (dbg) Done: out/minikube-windows-amd64.exe start -p test-preload-800000 --memory=2200 --alsologtostderr -v=1 --wait=true --driver=hyperv: (3m14.8034283s)
preload_test.go:71: (dbg) Run:  out/minikube-windows-amd64.exe -p test-preload-800000 image list
preload_test.go:71: (dbg) Done: out/minikube-windows-amd64.exe -p test-preload-800000 image list: (7.4826996s)
helpers_test.go:175: Cleaning up "test-preload-800000" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p test-preload-800000
E0408 20:30:48.574141    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
E0408 20:31:05.486097    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p test-preload-800000: (42.0007007s)
--- PASS: TestPreload (565.77s)

                                                
                                    
x
+
TestScheduledStopWindows (332.97s)

                                                
                                                
=== RUN   TestScheduledStopWindows
scheduled_stop_test.go:128: (dbg) Run:  out/minikube-windows-amd64.exe start -p scheduled-stop-897500 --memory=2048 --driver=hyperv
E0408 20:31:52.281587    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
scheduled_stop_test.go:128: (dbg) Done: out/minikube-windows-amd64.exe start -p scheduled-stop-897500 --memory=2048 --driver=hyperv: (3m19.5824013s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-windows-amd64.exe stop -p scheduled-stop-897500 --schedule 5m
scheduled_stop_test.go:137: (dbg) Done: out/minikube-windows-amd64.exe stop -p scheduled-stop-897500 --schedule 5m: (10.6848335s)
scheduled_stop_test.go:191: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.TimeToStop}} -p scheduled-stop-897500 -n scheduled-stop-897500
scheduled_stop_test.go:191: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.TimeToStop}} -p scheduled-stop-897500 -n scheduled-stop-897500: exit status 1 (10.0124435s)
scheduled_stop_test.go:191: status error: exit status 1 (may be ok)
scheduled_stop_test.go:54: (dbg) Run:  out/minikube-windows-amd64.exe ssh -p scheduled-stop-897500 -- sudo systemctl show minikube-scheduled-stop --no-page
scheduled_stop_test.go:54: (dbg) Done: out/minikube-windows-amd64.exe ssh -p scheduled-stop-897500 -- sudo systemctl show minikube-scheduled-stop --no-page: (9.7206685s)
scheduled_stop_test.go:137: (dbg) Run:  out/minikube-windows-amd64.exe stop -p scheduled-stop-897500 --schedule 5s
scheduled_stop_test.go:137: (dbg) Done: out/minikube-windows-amd64.exe stop -p scheduled-stop-897500 --schedule 5s: (10.8551941s)
E0408 20:36:05.487241    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\functional-873100\\client.crt: The system cannot find the path specified." logger="UnhandledError"
scheduled_stop_test.go:205: (dbg) Run:  out/minikube-windows-amd64.exe status -p scheduled-stop-897500
scheduled_stop_test.go:205: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status -p scheduled-stop-897500: exit status 7 (2.4484645s)

                                                
                                                
-- stdout --
	scheduled-stop-897500
	type: Control Plane
	host: Stopped
	kubelet: Stopped
	apiserver: Stopped
	kubeconfig: Stopped
	

                                                
                                                
-- /stdout --
scheduled_stop_test.go:176: (dbg) Run:  out/minikube-windows-amd64.exe status --format={{.Host}} -p scheduled-stop-897500 -n scheduled-stop-897500
scheduled_stop_test.go:176: (dbg) Non-zero exit: out/minikube-windows-amd64.exe status --format={{.Host}} -p scheduled-stop-897500 -n scheduled-stop-897500: exit status 7 (2.5045114s)

                                                
                                                
-- stdout --
	Stopped

                                                
                                                
-- /stdout --
scheduled_stop_test.go:176: status error: exit status 7 (may be ok)
helpers_test.go:175: Cleaning up "scheduled-stop-897500" profile ...
helpers_test.go:178: (dbg) Run:  out/minikube-windows-amd64.exe delete -p scheduled-stop-897500
E0408 20:36:35.382449    7904 cert_rotation.go:171] "Unhandled Error" err="key failed with : open C:\\Users\\jenkins.minikube3\\minikube-integration\\.minikube\\profiles\\addons-847800\\client.crt: The system cannot find the path specified." logger="UnhandledError"
helpers_test.go:178: (dbg) Done: out/minikube-windows-amd64.exe delete -p scheduled-stop-897500: (27.1574115s)
--- PASS: TestScheduledStopWindows (332.97s)

                                                
                                    
x
+
TestNoKubernetes/serial/StartNoK8sWithVersion (0.43s)

                                                
                                                
=== RUN   TestNoKubernetes/serial/StartNoK8sWithVersion
no_kubernetes_test.go:83: (dbg) Run:  out/minikube-windows-amd64.exe start -p NoKubernetes-632800 --no-kubernetes --kubernetes-version=1.20 --driver=hyperv
no_kubernetes_test.go:83: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p NoKubernetes-632800 --no-kubernetes --kubernetes-version=1.20 --driver=hyperv: exit status 14 (427.3454ms)

                                                
                                                
-- stdout --
	* [NoKubernetes-632800] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	
	

                                                
                                                
-- /stdout --
** stderr ** 
	X Exiting due to MK_USAGE: cannot specify --kubernetes-version with --no-kubernetes,
	to unset a global config run:
	
	$ minikube config unset kubernetes-version

                                                
                                                
** /stderr **
--- PASS: TestNoKubernetes/serial/StartNoK8sWithVersion (0.43s)

                                                
                                    

Test skip (32/193)

x
+
TestDownloadOnly/v1.20.0/cached-images (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.20.0/cached-images (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.20.0/binaries (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.20.0/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.20.0/binaries (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.32.2/cached-images (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/cached-images
aaa_download_only_test.go:129: Preload exists, images won't be cached
--- SKIP: TestDownloadOnly/v1.32.2/cached-images (0.00s)

                                                
                                    
x
+
TestDownloadOnly/v1.32.2/binaries (0s)

                                                
                                                
=== RUN   TestDownloadOnly/v1.32.2/binaries
aaa_download_only_test.go:151: Preload exists, binaries are present within.
--- SKIP: TestDownloadOnly/v1.32.2/binaries (0.00s)

                                                
                                    
x
+
TestDownloadOnlyKic (0s)

                                                
                                                
=== RUN   TestDownloadOnlyKic
aaa_download_only_test.go:220: skipping, only for docker or podman driver
--- SKIP: TestDownloadOnlyKic (0.00s)

                                                
                                    
x
+
TestAddons/serial/GCPAuth/RealCredentials (0s)

                                                
                                                
=== RUN   TestAddons/serial/GCPAuth/RealCredentials
addons_test.go:698: This test requires a GCE instance (excluding Cloud Shell) with a container based driver
--- SKIP: TestAddons/serial/GCPAuth/RealCredentials (0.00s)

                                                
                                    
x
+
TestAddons/parallel/Olm (0s)

                                                
                                                
=== RUN   TestAddons/parallel/Olm
=== PAUSE TestAddons/parallel/Olm

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/Olm
addons_test.go:422: Skipping OLM addon test until https://github.com/operator-framework/operator-lifecycle-manager/issues/2534 is resolved
--- SKIP: TestAddons/parallel/Olm (0.00s)

                                                
                                    
x
+
TestAddons/parallel/AmdGpuDevicePlugin (0s)

                                                
                                                
=== RUN   TestAddons/parallel/AmdGpuDevicePlugin
=== PAUSE TestAddons/parallel/AmdGpuDevicePlugin

                                                
                                                

                                                
                                                
=== CONT  TestAddons/parallel/AmdGpuDevicePlugin
addons_test.go:972: skip amd gpu test on all but docker driver and amd64 platform
--- SKIP: TestAddons/parallel/AmdGpuDevicePlugin (0.00s)

                                                
                                    
x
+
TestDockerEnvContainerd (0s)

                                                
                                                
=== RUN   TestDockerEnvContainerd
docker_test.go:170: running with docker false windows amd64
docker_test.go:172: skipping: TestDockerEnvContainerd can only be run with the containerd runtime on Docker driver
--- SKIP: TestDockerEnvContainerd (0.00s)

                                                
                                    
x
+
TestKVMDriverInstallOrUpdate (0s)

                                                
                                                
=== RUN   TestKVMDriverInstallOrUpdate
driver_install_or_update_test.go:41: Skip if not linux.
--- SKIP: TestKVMDriverInstallOrUpdate (0.00s)

                                                
                                    
x
+
TestHyperKitDriverInstallOrUpdate (0s)

                                                
                                                
=== RUN   TestHyperKitDriverInstallOrUpdate
driver_install_or_update_test.go:105: Skip if not darwin.
--- SKIP: TestHyperKitDriverInstallOrUpdate (0.00s)

                                                
                                    
x
+
TestHyperkitDriverSkipUpgrade (0s)

                                                
                                                
=== RUN   TestHyperkitDriverSkipUpgrade
driver_install_or_update_test.go:169: Skip if not darwin.
--- SKIP: TestHyperkitDriverSkipUpgrade (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/DashboardCmd (7.38s)

                                                
                                                
=== RUN   TestFunctional/parallel/DashboardCmd
=== PAUSE TestFunctional/parallel/DashboardCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/DashboardCmd
functional_test.go:922: (dbg) daemon: [out/minikube-windows-amd64.exe dashboard --url --port 36195 -p functional-873100 --alsologtostderr -v=1]
functional_test.go:933: output didn't produce a URL
functional_test.go:927: (dbg) stopping [out/minikube-windows-amd64.exe dashboard --url --port 36195 -p functional-873100 --alsologtostderr -v=1] ...
helpers_test.go:502: unable to terminate pid 5656: Access is denied.
--- SKIP: TestFunctional/parallel/DashboardCmd (7.38s)

                                                
                                    
x
+
TestFunctional/parallel/DryRun (5.03s)

                                                
                                                
=== RUN   TestFunctional/parallel/DryRun
=== PAUSE TestFunctional/parallel/DryRun

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/DryRun
functional_test.go:991: (dbg) Run:  out/minikube-windows-amd64.exe start -p functional-873100 --dry-run --memory 250MB --alsologtostderr --driver=hyperv
functional_test.go:991: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p functional-873100 --dry-run --memory 250MB --alsologtostderr --driver=hyperv: exit status 1 (5.0298711s)

                                                
                                                
-- stdout --
	* [functional-873100] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 18:30:55.483487   12696 out.go:345] Setting OutFile to fd 1384 ...
	I0408 18:30:55.569844   12696 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:30:55.569844   12696 out.go:358] Setting ErrFile to fd 1388...
	I0408 18:30:55.569844   12696 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:30:55.587832   12696 out.go:352] Setting JSON to false
	I0408 18:30:55.590848   12696 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97841,"bootTime":1744039214,"procs":184,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:30:55.591848   12696 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:30:55.595848   12696 out.go:177] * [functional-873100] minikube v1.35.0 on Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:30:55.599850   12696 notify.go:220] Checking for updates...
	I0408 18:30:55.600861   12696 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:30:55.602850   12696 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:30:55.605847   12696 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:30:55.608849   12696 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:30:55.611848   12696 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:30:55.614850   12696 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:30:55.615848   12696 driver.go:394] Setting default libvirt URI to qemu:///system

                                                
                                                
** /stderr **
functional_test.go:997: skipping this error on HyperV till this issue is solved https://github.com/kubernetes/minikube/issues/9785
--- SKIP: TestFunctional/parallel/DryRun (5.03s)

                                                
                                    
x
+
TestFunctional/parallel/InternationalLanguage (5.03s)

                                                
                                                
=== RUN   TestFunctional/parallel/InternationalLanguage
=== PAUSE TestFunctional/parallel/InternationalLanguage

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/InternationalLanguage
functional_test.go:1037: (dbg) Run:  out/minikube-windows-amd64.exe start -p functional-873100 --dry-run --memory 250MB --alsologtostderr --driver=hyperv
functional_test.go:1037: (dbg) Non-zero exit: out/minikube-windows-amd64.exe start -p functional-873100 --dry-run --memory 250MB --alsologtostderr --driver=hyperv: exit status 1 (5.0302583s)

                                                
                                                
-- stdout --
	* [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	  - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	  - MINIKUBE_FORCE_SYSTEMD=
	  - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	  - MINIKUBE_LOCATION=20604
	  - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true

                                                
                                                
-- /stdout --
** stderr ** 
	I0408 18:31:00.513723    7940 out.go:345] Setting OutFile to fd 1184 ...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.624326    7940 out.go:358] Setting ErrFile to fd 1312...
	I0408 18:31:00.624326    7940 out.go:392] TERM=,COLORTERM=, which probably does not support color
	I0408 18:31:00.661824    7940 out.go:352] Setting JSON to false
	I0408 18:31:00.667148    7940 start.go:129] hostinfo: {"hostname":"minikube3","uptime":97846,"bootTime":1744039214,"procs":185,"os":"windows","platform":"Microsoft Windows 10 Enterprise N","platformFamily":"Standalone Workstation","platformVersion":"10.0.19045.5679 Build 19045.5679","kernelVersion":"10.0.19045.5679 Build 19045.5679","kernelArch":"x86_64","virtualizationSystem":"","virtualizationRole":"","hostId":"a0f355d5-8b6e-4346-9071-73232725d096"}
	W0408 18:31:00.667148    7940 start.go:137] gopshost.Virtualization returned error: not implemented yet
	I0408 18:31:00.674168    7940 out.go:177] * [functional-873100] minikube v1.35.0 sur Microsoft Windows 10 Enterprise N 10.0.19045.5679 Build 19045.5679
	I0408 18:31:00.678327    7940 notify.go:220] Checking for updates...
	I0408 18:31:00.680952    7940 out.go:177]   - KUBECONFIG=C:\Users\jenkins.minikube3\minikube-integration\kubeconfig
	I0408 18:31:00.684952    7940 out.go:177]   - MINIKUBE_FORCE_SYSTEMD=
	I0408 18:31:00.687953    7940 out.go:177]   - MINIKUBE_HOME=C:\Users\jenkins.minikube3\minikube-integration\.minikube
	I0408 18:31:00.690949    7940 out.go:177]   - MINIKUBE_LOCATION=20604
	I0408 18:31:00.693951    7940 out.go:177]   - MINIKUBE_SUPPRESS_DOCKER_PERFORMANCE=true
	I0408 18:31:00.696949    7940 config.go:182] Loaded profile config "functional-873100": Driver=hyperv, ContainerRuntime=docker, KubernetesVersion=v1.32.2
	I0408 18:31:00.697959    7940 driver.go:394] Setting default libvirt URI to qemu:///system

                                                
                                                
** /stderr **
functional_test.go:1042: skipping this error on HyperV till this issue is solved https://github.com/kubernetes/minikube/issues/9785
--- SKIP: TestFunctional/parallel/InternationalLanguage (5.03s)

                                                
                                    
x
+
TestFunctional/parallel/MountCmd (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/MountCmd
=== PAUSE TestFunctional/parallel/MountCmd

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/MountCmd
functional_test_mount_test.go:57: skipping: mount broken on hyperv: https://github.com/kubernetes/minikube/issues/5029
--- SKIP: TestFunctional/parallel/MountCmd (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/PodmanEnv (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/PodmanEnv
=== PAUSE TestFunctional/parallel/PodmanEnv

                                                
                                                

                                                
                                                
=== CONT  TestFunctional/parallel/PodmanEnv
functional_test.go:567: only validate podman env with docker container runtime, currently testing docker
--- SKIP: TestFunctional/parallel/PodmanEnv (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessDirect
functional_test_tunnel_test.go:258: skipping: access direct test is broken on windows: https://github.com/kubernetes/minikube/issues/8304
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessDirect (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDig (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/DNSResolutionByDscacheutil (0.00s)

                                                
                                    
x
+
TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0s)

                                                
                                                
=== RUN   TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS
functional_test_tunnel_test.go:99: DNS forwarding is only supported for Hyperkit on Darwin, skipping test DNS forwarding
--- SKIP: TestFunctional/parallel/TunnelCmd/serial/AccessThroughDNS (0.00s)

                                                
                                    
x
+
TestFunctionalNewestKubernetes (0s)

                                                
                                                
=== RUN   TestFunctionalNewestKubernetes
functional_test.go:84: 
--- SKIP: TestFunctionalNewestKubernetes (0.00s)

                                                
                                    
x
+
TestGvisorAddon (0s)

                                                
                                                
=== RUN   TestGvisorAddon
gvisor_addon_test.go:34: skipping test because --gvisor=false
--- SKIP: TestGvisorAddon (0.00s)

                                                
                                    
x
+
TestImageBuild/serial/validateImageBuildWithBuildEnv (0s)

                                                
                                                
=== RUN   TestImageBuild/serial/validateImageBuildWithBuildEnv
image_test.go:114: skipping due to https://github.com/kubernetes/minikube/issues/12431
--- SKIP: TestImageBuild/serial/validateImageBuildWithBuildEnv (0.00s)

                                                
                                    
x
+
TestKicCustomNetwork (0s)

                                                
                                                
=== RUN   TestKicCustomNetwork
kic_custom_network_test.go:34: only runs with docker driver
--- SKIP: TestKicCustomNetwork (0.00s)

                                                
                                    
x
+
TestKicExistingNetwork (0s)

                                                
                                                
=== RUN   TestKicExistingNetwork
kic_custom_network_test.go:73: only runs with docker driver
--- SKIP: TestKicExistingNetwork (0.00s)

                                                
                                    
x
+
TestKicCustomSubnet (0s)

                                                
                                                
=== RUN   TestKicCustomSubnet
kic_custom_network_test.go:102: only runs with docker/podman driver
--- SKIP: TestKicCustomSubnet (0.00s)

                                                
                                    
x
+
TestKicStaticIP (0s)

                                                
                                                
=== RUN   TestKicStaticIP
kic_custom_network_test.go:123: only run with docker/podman driver
--- SKIP: TestKicStaticIP (0.00s)

                                                
                                    
x
+
TestScheduledStopUnix (0s)

                                                
                                                
=== RUN   TestScheduledStopUnix
scheduled_stop_test.go:76: test only runs on unix
--- SKIP: TestScheduledStopUnix (0.00s)

                                                
                                    
x
+
TestSkaffold (0s)

                                                
                                                
=== RUN   TestSkaffold
skaffold_test.go:39: skipping due to https://github.com/kubernetes/minikube/issues/14232
--- SKIP: TestSkaffold (0.00s)

                                                
                                    
x
+
TestInsufficientStorage (0s)

                                                
                                                
=== RUN   TestInsufficientStorage
status_test.go:38: only runs with docker driver
--- SKIP: TestInsufficientStorage (0.00s)

                                                
                                    
x
+
TestMissingContainerUpgrade (0s)

                                                
                                                
=== RUN   TestMissingContainerUpgrade
version_upgrade_test.go:284: This test is only for Docker
--- SKIP: TestMissingContainerUpgrade (0.00s)

                                                
                                    
Copied to clipboard